1. Update the large-image compression logic.
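Large originals are no longer copied to a separate compression host over scp/ssh and shrunk with jpegoptim; they are now converted through the Convertio HTTP API (create a job, upload the file, poll until it finishes, download the result), and the compress.* config block is replaced by convertio.apikey.

A minimal sketch of that flow, mirroring the calls made in the diff below; the -s flag and the apikey, file, id and step variables are illustrative only, and jq is assumed to be installed (the script already depends on it):

# 1. create a conversion job; on success the job id is returned under .data.id
id=`curl -s -X POST -d "{\"apikey\":\"$apikey\",\"input\":\"upload\",\"outputformat\":\"jpeg\"}" http://api.convertio.co/convert | jq -r .data.id`
# 2. upload the oversized file into the job
curl -s -X PUT --upload-file $file http://api.convertio.co/convert/$id/$file
# 3. poll every 10 seconds until the job reports step "finish" (pixiv.sh also breaks out when status is "error")
while sleep 10; do
  step=`curl -s http://api.convertio.co/convert/$id/status | jq -r .data.step`
  [ "$step" = finish ] && break
done
# 4. fetch the converted output over the original file name, then upload as usual
curl -s http://api.convertio.co/convert/$id/status | jq -r .data.output.url | xargs curl -s -o $file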

Branch: master
Qihua Pan committed 3 years ago
parent 7a0f876df1
commit 470f99bbc2

Changed files:
  1. pixiv/config.json.example (7 changed lines)
  2. pixiv/pixiv.sh (55 changed lines)

@@ -19,11 +19,8 @@
"db":"database file recording successfully uploaded files",
"imageParam":"Aliyun image processing parameters, e.g. ?x-oss-process=image/format,webp/resize,w_2560,h_2560"
},
"compress":{
"Host":"图片压缩服务器地址",
"Port":"端口",
"User":"用户",
"Path":"压缩图片输出绝对路径"
"convertio":{
"apikey":"https://developers.convertio.co/zh/ 申请的API密钥",
},
"Cronitor":{
"API_KEY":"https://cronitor.io/ 申请的Key",

@@ -30,10 +30,7 @@ db_file=`cat $config_file | jq -r .aliyun_oss.db`
db_file_err=$db_file.err
imageParam=`cat $config_file | jq -r .aliyun_oss.imageParam`
CompressHost=`cat $config_file | jq -r .compress.Host`
CompressPort=`cat $config_file | jq -r .compress.Port`
CompressUser=`cat $config_file | jq -r .compress.User`
CompressPath=`cat $config_file | jq -r .compress.Path`
convertio_apikey=`cat $config_file | jq -r .convertio.apikey`
CronitorKey=`cat $config_file | jq -r .Cronitor.API_KEY`
CronitorJobName=`cat $config_file | jq -r .Cronitor.JOB_NAME`
@@ -156,12 +153,12 @@ do
json_file=$pid.json
if [ ! -f $json_file ]
then
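# extract the preload JSON embedded in the artwork page HTML (the content='{"timestamp"... attribute) and strip the attribute wrapper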
egrep -o "content='{\"timestamp.*].{3}" $png_html_file | sed -e "s/content='//" >$json_file
egrep -o "content='{\"timestamp.*" $png_html_file|sed -e "s/content='//"|sed -e "s/..$//" >$json_file
fi
pageCount=`jq --arg pid $pid '.illust[$pid].pageCount' $json_file`
original_url=`jq -r --arg pid $pid '.illust[$pid].urls.original' $json_file`
small_url=`jq -r --arg pid $pid '.illust[$pid].urls.small' $json_file`
title=`jq -r --arg pid $pid '.illust[$pid].title' $json_file|sed -e 's/\"/\\\"/g'`
title=`jq -r --arg pid $pid '.illust[$pid].title' $json_file|sed -e 's/\"/\\\"/g'|sed -e 's/<//g'|sed -e 's/>//g'`
title=`bash -c "echo '$title'|$rule"`
description=`jq -r --arg pid $pid '.illust[$pid].description' $json_file`
userName=`jq -r --arg pid $pid '.illust[$pid].userName' $json_file|sed -e 's/\"/\\\"/g'`
@@ -208,15 +205,44 @@ do
then
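# download the original image, sending the www.pixiv.net referer header that pixiv expects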
curl -v -H 'referer: https://www.pixiv.net/' $page_original_url -o $original_file_name
fi
if [ `du -b $original_file_name|awk '{print $1}'` -gt $maxFileSize ]
original_file_size=`du -b $original_file_name|awk '{print $1}'`
if [ $original_file_size -gt $maxFileSize ]
then
echo "图片:$original_file_name 体积:$original_file_size 超过 $maxFileSize_M,需要压缩"
scp -i ~/.ssh/$CompressHost -P $CompressPort $original_file_name $CompressUser@$CompressHost:$CompressPath/$original_file_name
ssh -i ~/.ssh/$CompressHost -p $CompressPort $CompressUser@$CompressHost "cd $CompressPath;jpegoptim --size=$maxFileSize_M $original_file_name"
scp -i ~/.ssh/$CompressHost -P $CompressPort $CompressUser@$CompressHost:$CompressPath/$original_file_name $original_file_name
echo "图片:$original_file_name 压缩体积:`du -h $original_file_name`"
echo "${original_file_name}文件体积超过${maxFileSize}字节,需要在线压缩"
result=`curl -v -X POST -d "{\"apikey\": \"$convertio_apikey\", \"input\":\"upload\", \"outputformat\":\"jpeg\"}" http://api.convertio.co/convert`
if [ `echo $result|jq -r .status` = ok ]
then
id=`echo $result|jq -r .data.id`
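# step 2: upload the original file into the job (PUT /convert/$id/$original_file_name)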
result=`curl -v -X PUT --upload-file $original_file_name http://api.convertio.co/convert/$id/$original_file_name`
if [ `echo $result|jq -r .status` = ok ]
then
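# step 3: poll /convert/$id/status every 10 seconds; once step reaches "finish", download the converted file over $original_file_name and upload it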
while true
do
sleep 10
result=`curl -v -X GET http://api.convertio.co/convert/$id/status`
if [[ `echo $result|jq -r .status` = ok && `echo $result|jq -r .data.step` = finish ]]
then
echo $result|jq .data.output.url|xargs curl -v -o $original_file_name
upload $original_file_name
break
elif [ `echo $result|jq -r .status` = error ]
then
break
else
echo "10s后重新获取转换结果"
fi
done
else
echo "Direct File Upload For Conversion error!!!"
fi
else
echo "Start a New Conversion error!!!"
fi
else
upload $original_file_name
fi
upload $original_file_name
fi
if [ ! -f $webp_file_name ]
then
@@ -268,3 +294,6 @@ done
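# ask Cronitor when this monitor is next expected to run, to quote in the wrap-up message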
next_expected_at=`curl -v https://cronitor.io/api/monitors/$CronitorJobName -u $CronitorKey:''|jq .next_expected_at`
curl -v -d chat_id=$chat_id -d text="Those were the top ${length} works of the $today daily ranking. This push is complete; the next push is expected at `date -d @$next_expected_at '+%Y-%m-%d %H:%M:%S'`. Please contact the admin if anything goes wrong. #date$_today #日期$_today " $baseApi/sendMessage
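# list cached html/json/webp files older than 7 days (only listed; nothing is deleted here)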
find -type f -mtime +7|grep html
find -type f -mtime +7|grep json
find -type f -mtime +7|grep webp