Compare commits

2 commits: af0e32eb68 ... f38bd73bf8

| Author | SHA1 | Date |
|---|---|---|
| | f38bd73bf8 | |
| | 11d3594121 | |

2 changed files with 96 additions and 74 deletions
File 1:

```
@@ -2,10 +2,22 @@
IFS="
"
#Set your parameters here
folder=/var/www/friendica
storagefolder=storage
#Name of the database
db=friendica
#User of the database
user=root
#Folder with the storage files to check
storagefolder=/var/www/friendica/storage
#The folder storage name, with slashes escaped to work through sed
folderescaped=${storagefolder////\\/}

loop_1() {
ks=$(echo "${p}" | sed -e "s/${folderescaped}//g" -e "s/\///g")
e=$(sudo -u "${user}" mariadb "${db}" -N -B -q -e "select \`backend-ref\` from photo where \`backend-ref\` = '${ks}'")
#If the file was not found in the database, but still exists in the filesystem, delete it
if [[ -z "${e}" && -f "${p}" ]]; then
sudo rm -rfv "${p}" #&> /dev/null
else
t=$(file "${p}")
if [[ "${t}" =~ JPEG ]]; then
nice -n 10 jpegoptim -m 76 "${p}" #&> /dev/null
```
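The `folderescaped=${storagefolder////\\/}` line is bash pattern substitution: `${var//pattern/replacement}` replaces every occurrence of `pattern`, so each `/` in the path becomes `\/` and the value can be embedded in a `sed` expression without terminating it early. A minimal sketch of the same trick (the path is illustrative):

```sh
#!/bin/bash
# ${var//pattern/replacement} substitutes every match; here the
# pattern is "/" and the replacement "\/", so the path survives
# being embedded in a sed s/.../.../ expression.
path=/var/www/friendica/storage
escaped=${path////\\/}
echo "${escaped}"
# -> \/var\/www\/friendica\/storage
echo "${path}/12/34/somefile" | sed -e "s/${escaped}//g"
# -> /12/34/somefile
```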
```
@@ -28,9 +40,10 @@ loop_1() {
size_new=$(stat -c%s "${tmppic}" 2>/dev/null || echo 0)
size_original=$(stat -c%s "${p}" 2>/dev/null || echo 0)
if [[ "${size_original}" -gt "${size_new}" ]]; then
mv "${tmppic}" "${p}" #&> /dev/null
mv -v "${tmppic}" "${p}" #&> /dev/null
else
rm "${tmppic}" #&> /dev/null
rm -v "${tmppic}" #&> /dev/null
fi
fi
fi
fi
```
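The `mv` and `rm` here gain `-v` so the script logs what it keeps and what it discards. The surrounding logic is a keep-the-smaller-file pattern: only replace the original when the optimized copy actually saved bytes. A self-contained sketch with placeholder file names and an extra guard against a missing optimized copy:

```sh
#!/bin/bash
# Replace original.jpg with optimized.jpg only if the optimized copy
# is strictly smaller; file names are placeholders. stat -c%s prints
# the size in bytes, and "|| echo 0" keeps the test well-formed when
# a file is missing.
size_new=$(stat -c%s "optimized.jpg" 2>/dev/null || echo 0)
size_original=$(stat -c%s "original.jpg" 2>/dev/null || echo 0)
if [[ "${size_new}" -gt 0 && "${size_original}" -gt "${size_new}" ]]; then
    mv -v "optimized.jpg" "original.jpg"
else
    rm -fv "optimized.jpg"
fi
```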
```
@@ -38,7 +51,11 @@ loop_1() {
fi
}

find "${folder}/${storagefolder}" -depth -mindepth 2 -type f -size +50k -atime -8 -not -iname "index.html" | (
#Generate an index to make searches faster
echo "Generating photo index..." #&> /dev/null
sudo mariadb "${db}" -e "alter table photo add index if not exists backend_index (\`backend-ref\`)" #&> /dev/null
echo "Generating list of files..." #&> /dev/null
find "${storagefolder}" -depth -mindepth 2 -type f -size +50k -mtime -8 -not -iname "index.html" | (
while read -r p; do
loop_1 "${p}" &
until [[ $(jobs -r -p | wc -l) -lt $(($(getconf _NPROCESSORS_ONLN) / 2)) ]]; do
```
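The `until` condition is the throttle for the fan-out: `jobs -r -p` prints one PID per running background job, and the loop waits until fewer than half of the online CPUs (`getconf _NPROCESSORS_ONLN`) are busy before launching the next `loop_1`. A standalone sketch of that pattern (the `work` function and the polling interval are illustrative):

```sh
#!/bin/bash
# Cap concurrent background jobs at half the online CPU count.
work() {
    sleep 1                      # stand-in for real per-file work
    echo "finished: $1"
}
max=$(($(getconf _NPROCESSORS_ONLN) / 2))
(( max < 1 )) && max=1           # guard for single-CPU machines
for i in $(seq 1 20); do
    work "${i}" &
    # jobs -r -p prints one PID per running job; poll until a slot opens.
    until [[ $(jobs -r -p | wc -l) -lt "${max}" ]]; do
        sleep 0.2
    done
done
wait                             # let the last batch drain
```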
```
@@ -47,3 +64,5 @@ find "${folder}/${storagefolder}" -depth -mindepth 2 -type f -size +50k -atime -
done
)
wait
#Drop the index in the end to save storage
sudo mariadb "${db}" -e "alter table photo drop index backend_index" #&> /dev/null
```
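Creating `backend_index` up front and dropping it at the end trades a one-time index build for fast per-file lookups during the run, without permanently paying the index's storage cost. If in doubt, `EXPLAIN` should confirm the lookup uses it; a sketch, with a placeholder literal:

```sh
#!/bin/bash
# Check that the one-off index is used for the per-file lookup; the
# output should name backend_index as the key instead of a full scan.
sudo mariadb friendica -e "explain select \`backend-ref\` from photo where \`backend-ref\` = 'PLACEHOLDER' \G"
```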
File 2:
```
@@ -63,8 +63,11 @@ url="https://www.youtube.com/@${channel}"
if [[ "${channel}" = "subscriptions" ]]; then
url="https://www.youtube.com/feed/subscriptions"
fi
for full_url in "${url}/videos" "${url}/shorts" "${url}/streams"; do
echo "${full_url}"
#for section_url in "${url}/videos" "${url}/shorts" "${url}/streams"; do
#Via https://github.com/yt-dlp/yt-dlp/issues/13573#issuecomment-3020152141
full_url=$(yt-dlp -I0 --print "playlist:https://www.youtube.com/playlist?list=UU%(channel_id.2:)s" "${url}")
#full_url=$(curl "${url}" | tr -d "\n\r" | xmlstarlet fo -R -n -H 2>/dev/null | xmlstarlet sel -t -v "/html" -n | grep "/channel/UC" | sed -e "s/var .* = //g" -e "s/\};/\}/g" -e "s/channel\/UC/playlist\?list=UU/g" | jq -r ".metadata .channelMetadataRenderer .channelUrl")
echo "${url} = ${full_url}"
if [[ -f "${cookies}" || "${channel}" = "subscriptions" ]]; then
#If available, you can use the cookies from your browser directly. Substitute
# --cookies "${cookies}"
```
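The new `full_url` assignment replaces the old curl/xmlstarlet/jq scrape (kept as a comment) with a yt-dlp one-liner from the linked issue: a YouTube channel ID always starts with `UC`, and swapping that prefix for `UU` names the channel's uploads playlist. `%(channel_id.2:)s` is output-template slicing (drop the first two characters), and `-I0` selects zero playlist items so nothing is extracted beyond printing the template once. Illustrative usage (the handle is a placeholder):

```sh
#!/bin/bash
# Resolve a channel page to its "all uploads" playlist URL without
# downloading anything: UC<id> -> UU<id>.
yt-dlp -I0 --print "playlist:https://www.youtube.com/playlist?list=UU%(channel_id.2:)s" \
    "https://www.youtube.com/@ExampleHandle"
```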
```
@@ -79,9 +82,9 @@ for full_url in "${url}/videos" "${url}/shorts" "${url}/streams"; do
#including the backslash so the multiline command keeps working.
"${python}" "${ytdl}" "${full_url}" \
--cookies "${cookies}" \
--extractor-args "youtubetab:approximate_date" \
--skip-download --download-archive "${archive}" \
--dateafter "${breaktime}" \
--extractor-args "youtubetab:approximate_date,youtubetab:skip=webpage" \
--break-on-reject --lazy-playlist --write-info-json \
--sleep-requests "${sleeptime}" \
--parse-metadata "video::(?P<formats>)" \
```
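The change in this hunk extends the extractor args with `youtubetab:skip=webpage`, telling the YouTube tab extractor not to fetch the channel webpage. The `--parse-metadata "video::(?P<formats>)"` line visible at the end is a size trick: the empty FROM string is matched against a regex whose named group captures nothing, which blanks the bulky `formats` field out of each written `.info.json`. A minimal sketch reusing the fields this script strips (the URL is a placeholder):

```sh
#!/bin/bash
# Write metadata-only .info.json files with heavyweight fields
# blanked out; the video URL is a placeholder.
yt-dlp --skip-download --write-info-json \
    --parse-metadata "video::(?P<formats>)" \
    --parse-metadata "video::(?P<tags>)" \
    --parse-metadata "video::(?P<categories>)" \
    "https://www.youtube.com/watch?v=PLACEHOLDER"
```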
```
@@ -94,9 +97,9 @@ for full_url in "${url}/videos" "${url}/shorts" "${url}/streams"; do
--parse-metadata "video::(?P<categories>)"
else
"${python}" "${ytdl}" "${full_url}" \
--extractor-args "youtubetab:approximate_date" \
--skip-download --download-archive "${archive}" \
--dateafter "${breaktime}" \
--extractor-args "youtubetab:approximate_date,youtubetab:skip=webpage" \
--break-on-reject --lazy-playlist --write-info-json \
--sleep-requests "${sleeptime}" \
--parse-metadata "video::(?P<formats>)" \
```
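The same `youtubetab:skip=webpage` change lands in the cookie-less branch, whose option set is otherwise identical minus `--cookies`. The trio doing the incremental work is worth spelling out: `--download-archive` skips IDs already seen, `--dateafter` rejects entries older than the cutoff, and `--break-on-reject` with `--lazy-playlist` stops the crawl at the first rejection instead of enumerating the whole feed. A stripped-down sketch with placeholder values:

```sh
#!/bin/bash
# Incremental, metadata-only walk of an uploads feed; the playlist,
# archive path and date are placeholders. --break-on-reject stops at
# the first entry rejected by --dateafter, and --lazy-playlist lets
# that happen before the whole playlist is enumerated.
yt-dlp "https://www.youtube.com/playlist?list=UUPLACEHOLDER" \
    --skip-download --write-info-json \
    --download-archive "/tmp/archive.txt" \
    --dateafter "20240101" \
    --break-on-reject --lazy-playlist
```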
```
@@ -108,7 +111,7 @@ for full_url in "${url}/videos" "${url}/shorts" "${url}/streams"; do
--parse-metadata "video::(?P<tags>)" \
--parse-metadata "video::(?P<categories>)"
fi
done
#done
if [[ ${enablecsv} = 1 ]]; then
if [[ -f "${tmpcsv}" ]]; then
rm -rf "${tmpcsv}"
```
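The CSV branch starts by clearing any stale temp file from a previous run. Since `${tmpcsv}` is a regular file, plain `rm -f` inside the existence check does the same job as `rm -rf`; a minimal sketch with a placeholder path:

```sh
#!/bin/bash
# Reset the temp CSV before regenerating it; the path is a placeholder.
# rm -f already tolerates a missing file, so the test is belt-and-braces.
tmpcsv="/tmp/videolist.csv"
if [[ -f "${tmpcsv}" ]]; then
    rm -f "${tmpcsv}"
fi
```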