aboutsummaryrefslogtreecommitdiff
path: root/lib/_ak_fs
diff options
context:
space:
mode:
Diffstat (limited to 'lib/_ak_fs')
-rwxr-xr-x  lib/_ak_fs  653
1 files changed, 512 insertions, 141 deletions
diff --git a/lib/_ak_fs b/lib/_ak_fs
index f55e58e..a0f5619 100755
--- a/lib/_ak_fs
+++ b/lib/_ak_fs
@@ -1,18 +1,46 @@
-#!/bin/bash
-
-source $AK_LIBDIR/_ak_log
+#!/usr/bin/env bash
+###
+### arching-kaos-tools
+### Tools to interact and build an Arching Kaos Infochain
+### Copyright (C) 2021 - 2025 kaotisk
+###
+### This program is free software: you can redistribute it and/or modify
+### it under the terms of the GNU General Public License as published by
+### the Free Software Foundation, either version 3 of the License, or
+### (at your option) any later version.
+###
+### This program is distributed in the hope that it will be useful,
+### but WITHOUT ANY WARRANTY; without even the implied warranty of
+### MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+### GNU General Public License for more details.
+###
+### You should have received a copy of the GNU General Public License
+### along with this program. If not, see <http://www.gnu.org/licenses/>.
+###
+
+source $AK_LIBDIR/_ak_lib_load
+_ak_lib_load _ak_log
+_ak_lib_load _ak_hash_exchange
+
+function _ak_fs_dir_init_setup(){
+ _ak_check_and_create_dir $AK_MAPSDIR
+ _ak_check_and_create_dir $AK_CHUNKSDIR
+ _ak_check_and_create_dir $AK_LEAFSDIR
+}
-_ak_fs_return_hash_path(){
- hashpath="$(echo -n "$1" |sed 's/./&\//g;s/\/$//g')"
- echo -n "$hashpath"
+function _ak_fs_return_hash_path(){
+ #hashpath="$(echo -n "$1" |sed 's/./&\//g;s/\/$//g')"
+ #hashpath="$(echo -n "$1" | fold -w 4 | tr $'\n' '/')"
+ echo -n "$1"
}
-_ak_fs_return_hash_dir(){
- hashdir="$(echo -n "$1" | sed -e 's/./&\//g' | grep '\(./\)\{128\}' | sed -e 's/..$//')"
- echo -n "$hashdir"
+function _ak_fs_return_hash_dir(){
+ #hashdir="$(_ak_fs_return_hash_path $1 | sed -e 's/.....$//')"
+ #echo -n "$hashdir"
+ echo -n "$1"
}
-_ak_fs_verify_input_is_hash(){
+function _ak_fs_verify_input_is_hash(){
if [ ! -z "$1" ] && echo "$1" | grep '[0123456789abcdef]\{128\}' > /dev/null 2>&1
then
return 0
@@ -22,7 +50,7 @@ _ak_fs_verify_input_is_hash(){
fi
}
-_ak_fs_create_dir_for_hash(){
+function _ak_fs_create_dir_for_hash(){
_ak_fs_verify_input_is_hash $2
if [ ! -z $1 ] && [ ! -z $2 ] && [ -n "$1" ] && [ -n "$2" ]
then
@@ -31,14 +59,16 @@ _ak_fs_create_dir_for_hash(){
}
# Append last chk if not even number
-_ak_fs_appendLastIfNotEven(){
+function _ak_fs_appendLastIfNotEven(){
if [ "$(( $(wc -l "$1" | awk '{ print $1 }') % 2))" -ne 0 ]
then
- tail -n 1 "$1" >> "$1"
+ tail -n 1 "$1" >> "$1.temp"
+ cat "$1.temp" >> "$1"
+ rm $1.temp
fi
}
-_ak_fs_import(){
+function _ak_fs_import(){
#
# The concept is bit more complicated now
#
@@ -59,77 +89,16 @@ _ak_fs_import(){
#
# 6. We are done!
#
-
# A temporary root dir to work on
- TEMPORARYDIR="$(_ak_make_temp_directory)"
+ TEMPDIR="$(_ak_make_temp_directory)"
# A subdir to split the files there
- TECHDIR="$TEMPORARYDIR/chks"
+ TCHKDIR="$TEMPDIR/chks"
# A pin point to return from where we came from
CURRENTDIR="$(pwd)"
-
# Checking directories and create them if necessary
- # rm -rf $TEMPORARYDIR
-
- # TECHDIR
- if [ ! -d "$TECHDIR" ]
- then
- mkdir -p "$TECHDIR"
- if [ $? -eq 0 ]
- then
- _ak_log_info "Folder $TECHDIR created!"
- else
- _ak_log_error "Problem occured while creating $TECHDIR"
- exit 1
- fi
- else
- _ak_log_debug "Temp dir found"
- fi
-
- # AK_MAPSDIR
- if [ ! -d "$AK_MAPSDIR" ]
- then
- mkdir -p "$AK_MAPSDIR"
- if [ $? -eq 0 ]
- then
- _ak_log_debug "Folder $AK_MAPSDIR created!"
- else
- _ak_log_error "Problem occured while creating $AK_MAPSDIR"
- exit 1
- fi
- else
- _ak_log_debug "Mapsdir found"
- fi
-
- # AK_CHUNKSDIR
- if [ ! -d "$AK_CHUNKSDIR" ]
- then
- mkdir -p "$AK_CHUNKSDIR"
- if [ $? -eq 0 ]
- then
- _ak_log_info "Folder $AK_CHUNKSDIR created!"
- else
- _ak_log_error "Problem occured while creating $AK_CHUNKSDIR"
- exit 1
- fi
- else
- _ak_log_debug "Workdir found"
- fi
-
- # AK_LEAFSDIR
- if [ ! -d "$AK_LEAFSDIR" ]
- then
- mkdir -p "$AK_LEAFSDIR"
- if [ $? -eq 0 ]
- then
- _ak_log_info "Folder $AK_LEAFSDIR created!"
- else
- _ak_log_error "Problem occured while creating $AK_LEAFSDIR"
- echo "ERROR Can't create $AK_LEAFSDIR"
- exit 1
- fi
- else
- _ak_log_debug "Workdir found"
- fi
+ # rm -rf $TEMPDIR
+ _ak_check_and_create_dir $TCHKDIR
+ _ak_fs_dir_init_setup
if [ ! -f "$1" ]
then
_ak_log_error "File $1 not found"
@@ -142,21 +111,19 @@ _ak_fs_import(){
# sha256sum as quick pointer to root of the tree
#
_ak_log_info "Storing original hash of $1 along with its name"
- sha512sum "$1" > $TEMPORARYDIR/3rd_gen_map
+ sha512sum "$1" > $TEMPDIR/3rd_gen_map
_ak_log_info "Encoding to base64"
base64 $1 > file
FILE="file"
fi
-
# Uncomment next line in case you want to debug the resulting script as well
- # echo 'set -xe' > $TEMPORARYDIR/cmd_queue.sh
-
+ # echo 'set -xe' > $TEMPDIR/cmd_queue.sh
# We get the SHA512 hash for the $FILE given
CHECKSUM=$(sha512sum "$FILE"|awk '{print $1}')
- FILE_SIZE="$(du -b $FILE | awk '{ print $1 }')"
+ FILE_SIZE="$( (du -b $FILE||stat -f %z $FILE)2>/dev/null | awk '{ print $1 }')"
if [ $FILE_SIZE -lt 4097 ]
then
- cp $FILE "$TECHDIR/$(basename "$FILE")-00000000000000000000000000000000000000000000000000.chk"
+ cp $FILE "$TCHKDIR/chunk-0000000000000"
else
FACTOR=1024
while [ $(( $FILE_SIZE / $FACTOR )) -gt 250 ]
@@ -165,23 +132,24 @@ _ak_fs_import(){
done
_ak_log_info "Gonna split in $FACTOR size"
sleep 3
- # We split the file into 4*1024 bytes and output the chunks into TECHDIR
- split -a 50 -b $FACTOR --additional-suffix ".chk" -d "$FILE" "$TECHDIR/$(basename "$FILE")-"
+ # We split the file into 4*1024 bytes and output the chunks into TCHKDIR
+ split -a 13 -b $FACTOR -d "$FILE" "$TCHKDIR/chunk-"
fi
_ak_log_info "File done splitting"
-
# We go over there...
- cd $TECHDIR
+ cd $TCHKDIR
#set -xe
# We get every chunks' SHA512 and we craft a script to rename the chunks and
# move them to AK_CHUNKSDIR
- for file in $TEMPORARYDIR/chks/*
+ for file in *
do
- sha512sum $file >> $TEMPORARYDIR/map
+ _ak_log_debug "Hashing ${file}..."
+ sha512sum $file >> $TEMPDIR/map
+ _ak_log_debug "Hashing of ${file} completed"
done
- _ak_fs_appendLastIfNotEven "$TEMPORARYDIR/map"
+ _ak_fs_appendLastIfNotEven "$TEMPDIR/map"
# Figure out how many times we need to pack
- totalChunks=`grep 'chk' $TEMPORARYDIR/map | wc -l`
+ totalChunks=`grep 'chunk-' $TEMPDIR/map | wc -l`
temp="$totalChunks"
timesRan=0
while [ $temp -ne 1 ]
@@ -191,8 +159,7 @@ _ak_fs_import(){
done
_ak_log_debug "Ran $timesRan times"
_ak_log_debug "Total chunks $totalChunks"
-
- workingIndex="$TEMPORARYDIR/map"
+ workingIndex="$TEMPDIR/map"
c=$timesRan
while [ $c -ne 0 ]
do
@@ -220,61 +187,49 @@ _ak_fs_import(){
totalChunks=`cat $workingIndex | wc -l`
c=`expr $c - 1`
done
-
if [ -f level.1.map ]
then
- sha512sum level.1.map
- sha512sum level.1.map >> $TEMPORARYDIR/3rd_gen_map
+ # sha512sum level.1.map
+ sha512sum level.1.map >> $TEMPDIR/3rd_gen_map
else
- echo error
+        _ak_log_error "Got an error: level.1.map is missing"
exit 1
fi
-
# Reset file with uniq
- cat $TEMPORARYDIR/map | uniq > $TEMPORARYDIR/map2
- cat $TEMPORARYDIR/map2 > $TEMPORARYDIR/map
- rm $TEMPORARYDIR/map2
-
+ cat $TEMPDIR/map | uniq > $TEMPDIR/map2
+ cat $TEMPDIR/map2 > $TEMPDIR/map
+ rm $TEMPDIR/map2
counter=0
while IFS="" read -r p || [ -n "$p" ]
do
-# printf "mv %s %s/%s\n" "$(echo $p | awk '{ print $2 }')" "$AK_CHUNKSDIR" "$(echo $p | awk '{ print $1 }')" >> $TEMPORARYDIR/cmd_queue.sh
+# printf "mv %s %s/%s\n" "$(echo $p | awk '{ print $2 }')" "$AK_CHUNKSDIR" "$(echo $p | awk '{ print $1 }')" >> $TEMPDIR/cmd_queue.sh
#mkdir -p $AK_CHUNKSDIR/$(echo $p | awk '{ print $1 }')
cp $(echo $p | awk '{ print $2 }') $AK_CHUNKSDIR/$(echo $p | awk '{ print $1 }')
counter=`expr "$counter" + 1`
- done < $TEMPORARYDIR/map
-
+ done < $TEMPDIR/map
# We run the crafted script
-# sh $TEMPORARYDIR/cmd_queue.sh
-
+# sh $TEMPDIR/cmd_queue.sh
# and we delete it
-# rm $TEMPORARYDIR/cmd_queue.sh
-
+# rm $TEMPDIR/cmd_queue.sh
# We inform the map about the original $FILE name and SHA512
-# echo "$CHECKSUM $(basename "$FILE")" >> $TEMPORARYDIR/map
-
+# echo "$CHECKSUM $(basename "$FILE")" >> $TEMPDIR/map
# We get the SHA512 hash of the resulted map file
-# MAPFILEHASH="$(sha512sum $TEMPORARYDIR/map | awk '{ print $1 }')"
-
+# MAPFILEHASH="$(sha512sum $TEMPDIR/map | awk '{ print $1 }')"
# and we rename it with it and move it to AK_MAPSDIR
-# `sha512sum $TEMPORARYDIR/map | awk '{print "mv " $2 " '$AK_MAPSDIR/'" $1}'`
-
- mp512p="$(sha512sum $TEMPORARYDIR/3rd_gen_map | awk '{print $1}')"
- mv $TEMPORARYDIR/3rd_gen_map $AK_MAPSDIR/$mp512p
-
- # We remove the TEMPORARYDIR
- rm -rf $TEMPORARYDIR
-
+# `sha512sum $TEMPDIR/map | awk '{print "mv " $2 " '$AK_MAPSDIR/'" $1}'`
+ mp512p="$(sha512sum $TEMPDIR/3rd_gen_map | awk '{print $1}')"
+ cp $TEMPDIR/3rd_gen_map $AK_MAPSDIR/$mp512p
+ # We remove the TEMPDIR
+ rm -rf $TEMPDIR
# and print the MAPFILEHASH
echo "$mp512p"
-
}
-_ak_fs_find_depth(){
+function _ak_fs_find_depth(){
currentNode="$1"
#pathToNode="$AK_LEAFSDIR/$(_ak_fs_return_hash_path $currentNode)"
pathToNode="$AK_LEAFSDIR/$currentNode"
- if [ -f $pathToNode ] && [ "$(du -b $pathToNode | awk '{print $1}')" == "258" ]
+ if [ -f $pathToNode ] && [ "$( (du -b $pathToNode||stat -f %z $pathToNode)2>/dev/null | awk '{print $1}')" == "258" ]
then
fileHead="$(head -n 1 $pathToNode)"
counter="$(expr $counter + 1)"
@@ -285,7 +240,7 @@ _ak_fs_find_depth(){
else
exit 111
# Try to download stuff
- # wget -s $remoteMrk/$currentNode -O $AK_LEAFSDIR/$currentNode
+ # curl -s $remoteMrk/$currentNode -o $AK_LEAFSDIR/$currentNode
# if [ $? -ne 0 ]
# then
# exit 111
@@ -294,29 +249,29 @@ _ak_fs_find_depth(){
fi
}
-_ak_fs_cat(){
+function _ak_fs_cat(){
if [ -z $1 ]
then
echo "Please provide a SHA512 hash"
exit 1
fi
- echo $1 | grep "[0123456789abcdef]\{128\}"
+ echo $1 | grep "[0123456789abcdef]\{128\}" > /dev/null 2>&1
if [ $? -ne 0 ]
then
- echo "Look, I asked for a SHA512 hash, please try again"
+ _ak_log_error "Look, I asked for a SHA512 hash, please try again"
exit 1
fi
treeRootHash="$1"
# Enter temp folder
- TEMPASSIN="$(_ak_make_temp_directory)"
- cd $TEMPASSIN
+ TEMPDIR="$(_ak_make_temp_directory)"
+ cd $TEMPDIR
currentNode="$treeRootHash"
counter=0
printf "%s" "$currentNode" > workspace.0
_ak_fs_find_depth "$currentNode"
depth="$(expr `cat depth` + 1)"
counter="0"
- printf "%s" "$depth"
+ # printf "%s" "$depth"
if [ -f output ]
then
rm output
@@ -330,7 +285,7 @@ _ak_fs_cat(){
nextLevel="$(expr $counter + 1)"
if [ "$p" == "" ]
then
- echo hi
+ _ak_log_debug "Got empty line"
else
#expectedPath="$AK_LEAFSDIR/$(_ak_fs_return_hash_path $p)"
expectedPath="$AK_LEAFSDIR/$p"
@@ -352,12 +307,11 @@ _ak_fs_cat(){
done < workspace.$counter
counter="$(expr $counter + 1)"
done
-
base64 -d output
_ak_log_info "Recreation of $treeRootHash succeeded!"
}
-_ak_fs_export(){
+function _ak_fs_export(){
if [ -z $1 ]
then
_ak_log_error "Please provide a SHA512 hash"
@@ -369,21 +323,21 @@ _ak_fs_export(){
exit 2
fi
outputFilename="$2"
- echo $1 | grep "[0123456789abcdef]\{128\}"
+ echo $1 | grep "[0123456789abcdef]\{128\}" > /dev/null 2>&1
if [ $? -ne 0 ]
then
_ak_log_error "Look, I asked for a SHA512 hash, please try again"
exit 1
fi
- _ak_fs_export "$1" > $outputFilename
+ _ak_fs_cat "$1" > $outputFilename
}
-_ak_fs_list(){
+function _ak_fs_list(){
if [ -d "${AK_MAPSDIR}" ]
then
find $AK_MAPSDIR -type f | while read fina
do
- cat $fina | tr $'\n' ' ' | awk '{ print $2 " " $3 }'
+ cat $fina | tr '\n' ' ' | awk '{ print "'$(basename ${fina})' " $3 " " $2 }'
done
else
_ak_log_debug "Making ${AK_MAPSDIR} directory"
@@ -391,3 +345,420 @@ _ak_fs_list(){
_ak_log_debug "Empty directory"
fi
}
+
+function _ak_fs_from_map_get_original_hash(){
+ if [ ! -z "$1" ] && [ -n "$1" ]
+ then
+ if [ -f "$AK_MAPSDIR/$1" ]
+ then
+ cat $AK_MAPSDIR/$1 | tr '\n' ' ' | awk '{ print $1 }'
+ else
+ _ak_log_error "Map $1 was not found"
+ fi
+ else
+ _ak_log_error "No input given"
+ fi
+}
+
+function _ak_fs_from_map_get_original_filename(){
+ if [ ! -z "$1" ] && [ -n "$1" ]
+ then
+ if [ -f "$AK_MAPSDIR/$1" ]
+ then
+ cat $AK_MAPSDIR/$1 | sed -e 's/ / /g;' | tr '\n' ' ' | awk '{ print $2 }'
+ else
+ _ak_log_error "Map $1 was not found"
+ fi
+ else
+ _ak_log_error "No input given"
+ fi
+}
+
+function _ak_fs_from_map_get_root_hash(){
+ if [ ! -z "$1" ] && [ -n "$1" ]
+ then
+ if [ -f "$AK_MAPSDIR/$1" ]
+ then
+ cat $AK_MAPSDIR/$1 | tr '\n' ' ' | awk '{ print $3 }'
+ else
+ _ak_log_error "Map $1 was not found"
+ fi
+ else
+ _ak_log_error "No input given"
+ fi
+}
+
+function _ak_fs_cat_from_map_hash(){
+ if _ak_fs_verify_input_is_hash "$1"
+ then
+ _ak_fs_cat `_ak_fs_from_map_get_root_hash $1`
+ fi
+}
+
+function _ak_fs_get_from_map_hash(){
+ if _ak_fs_verify_input_is_hash "$1"
+ then
+ _ak_fs_export `_ak_fs_from_map_get_root_hash $1` "$(basename `_ak_fs_from_map_get_original_filename $1`)"
+ fi
+}
+
+function _ak_net_try_leaf(){
+ if [ ! -z "$1" ] && [ -n "$1" ]
+ then
+ if [ ! -f "$AK_LEAFSDIR/$1" ]
+ then
+ _ak_he_list_peers | while read peer
+ do
+ _ak_log_info "Searching as a leaf... $1 @ ${peer}"
+ request_url="$(_ak_he_url_request_leaf_hash_from_peer ${peer} $1)"
+ curl -s --connect-timeout 3 -o $1 ${request_url}
+ if [ $? -ne 0 ]
+ then
+ _ak_log_warning "Curl to ${request_url} failed."
+ continue
+ fi
+ if [ -f $1 ]
+ then
+ _ak_log_info "${request_url} gave out a file"
+ if [ "$(sha512sum $1 | awk '{print $1}')" == "$1" ]
+ then
+ _ak_log_info "File hash match"
+ if [ "$(cat $1 | wc -l)" == 2 ]
+ then
+ _ak_log_info "File has two lines"
+ if [ "$(cat $1 | grep '^[0-9a-z]\{128\}$' | wc -l)" == 2 ]
+ then
+ _ak_log_info "File has two lines which are 2 hashes"
+ # cat $1
+ cp $1 ${AK_LEAFSDIR}/$1
+ exit
+ break
+ else
+ _ak_log_warning "File $1 doesn't match expected format (two hashes)"
+ rm $1
+ fi
+ else
+ _ak_log_warning "File $1 doesn't match expected format (two lines)"
+ rm $1
+ fi
+ else
+ # cat $1 | jq >&2
+ _ak_log_warning "File $1 doesn't match expected hash: $(sha512sum $1 | awk '{print $1}')"
+ rm $1
+ fi
+ else
+ _ak_log_warning "No file $1"
+ fi
+ done
+ fi
+ fi
+}
+function _ak_net_try_chunk(){
+ if [ ! -z "$1" ] && [ -n "$1" ]
+ then
+ if [ ! -f "$AK_CHUNKSDIR/$1" ]
+ then
+ _ak_he_list_peers | while read peer
+ do
+ _ak_log_info "Searching as a chunk... $1 @ ${peer}"
+ request_url="$(_ak_he_url_request_chunk_hash_from_peer ${peer} $1)"
+ curl -s --connect-timeout 3 -o $1 ${request_url}
+ if [ $? -ne 0 ]
+ then
+ _ak_log_warning "Curl to $request_url failed."
+ continue
+ fi
+ if [ -f $1 ]
+ then
+ _ak_log_info "File found"
+ if [ "$(sha512sum $1 | awk '{print $1}')" == "$1" ]
+ then
+ _ak_log_info "File is valid"
+ if [ "$(cat $1 | grep -v '^[-A-Za-z0-9+/]*=\{0,3\}$')" == "" ]
+ then
+ _ak_log_info "File is base64"
+ # cat $1
+ cp $1 ${AK_CHUNKSDIR}/$1
+ exit
+ break
+ else
+ _ak_log_warning "File $1 doesn't match expected format (base64)"
+ rm $1
+ fi
+ else
+ # cat $1 | jq >&2
+ _ak_log_warning "File $1 doesn't match expected hash: $(sha512sum $1 | awk '{print $1}')"
+ rm $1
+ fi
+ else
+ _ak_log_warning "No file $1"
+ fi
+ done
+ fi
+ fi
+}
+
+function _ak_fs_net_find_depth(){
+ currentNode="$1"
+ pathToNode="$AK_LEAFSDIR/$currentNode"
+ _ak_net_try_leaf ${currentNode}
+ _ak_net_try_chunk ${currentNode}
+ if [ -f $pathToNode ] && [ "$( (du -b $pathToNode||stat -f %z $pathToNode)2>/dev/null | awk '{print $1}')" == "258" ]
+ then
+ fileHead="$(head -n 1 $pathToNode)"
+ counter="$(expr $counter + 1)"
+ _ak_fs_net_find_depth "$fileHead"
+ elif [ ! -f $pathToNode ]
+ then
+ printf "%s" "$counter" > depth
+ else
+ pwd
+ exit 111
+ # Try to download stuff
+ # curl -s $remoteMrk/$currentNode -o $AK_LEAFSDIR/$currentNode
+ # if [ $? -ne 0 ]
+ # then
+ # exit 111
+ # fi
+ # _ak_fs_find_depth "$currentNode"
+ fi
+}
+
+function _ak_fs_net_cat(){
+ if [ -z $1 ]
+ then
+ echo "Please provide a SHA512 hash"
+ exit 1
+ fi
+ echo $1 | grep "[0123456789abcdef]\{128\}" > /dev/null 2>&1
+ if [ $? -ne 0 ]
+ then
+ echo "Look, I asked for a SHA512 hash, please try again"
+ exit 1
+ fi
+ treeRootHash="$1"
+ TEMPDIR="$(_ak_make_temp_directory)"
+ cd $TEMPDIR
+ _ak_log_debug "We are here: $(pwd)"
+ currentNode="$treeRootHash"
+ counter=0
+ printf "%s" "$currentNode" > workspace.0
+ while [ ! -f "$currentNode" ]
+ do
+ curl -s -o ${currentNode} $(_ak_he_url_request_leaf_hash_from_random_peer ${currentNode})
+ done
+ _ak_fs_net_find_depth "$currentNode"
+ depth="$(expr `cat depth` + 1)"
+ counter="0"
+ # printf "%s" "$depth"
+ if [ -f output ]
+ then
+ rm output
+ fi
+ touch output
+ while [ "$counter" -lt "$depth" ]
+ do
+ _ak_log_debug "Entering loop... $counter $depth"
+ while IFS="" read -r p || [ -n "$p" ]
+ do
+ nextLevel="$(expr $counter + 1)"
+ if [ "$p" == "" ]
+ then
+ _ak_log_debug "Got empty line"
+ else
+ expectedPath="$AK_LEAFSDIR/$p"
+ if [ -f $expectedPath ]
+ then
+ _ak_log_info "Leaf available"
+ if [ "$(head -n 1 $expectedPath)" == "$(tail -n 1 $expectedPath)" ]
+ then
+ head -n 1 $expectedPath >> workspace.$nextLevel
+ else
+ cat $expectedPath >> workspace.$nextLevel
+ fi
+ elif [ -f $AK_CHUNKSDIR/$p ]
+ then
+ _ak_log_info "Chunk available"
+ cat $AK_CHUNKSDIR/$p >> output
+ else
+ _ak_log_info "Retrying with network"
+ _ak_net_try_leaf $p
+ _ak_net_try_chunk $p
+ _ak_log_info "After network try..."
+ if [ -f $AK_LEAFSDIR/$1 ]
+ then
+ _ak_log_info "Leaf available over net"
+ if [ "$(head -n 1 $AK_LEAFSDIR/$1)" == "$(tail -n 1 $AK_LEAFSDIR/$1)" ]
+ then
+ head -n 1 $AK_LEAFSDIR/$1 >> workspace.$nextLevel
+ else
+ cat $AK_LEAFSDIR/$1 >> workspace.$nextLevel
+ fi
+ elif [ -f $AK_CHUNKSDIR/$p ]
+ then
+ _ak_log_info "Chunk available over net"
+ cat $AK_CHUNKSDIR/$p >> output
+ fi
+ fi
+ fi
+ done < workspace.$counter
+ counter="$(expr $counter + 1)"
+ done
+ base64 -d output > output.d
+ shaout="$(sha512sum output.d | awk '{print $1}')"
+ expHash="$2"
+ if [ "$shaout" == "$expHash" ]
+ then
+ base64 -d output
+ _ak_log_info "Recreation of $treeRootHash succeeded!"
+ else
+ _ak_log_error "Recreation of $treeRootHash failed!"
+ _ak_log_error "Expected: $expHash"
+ _ak_log_error "Got: $shaout"
+ fi
+}
+
+function _ak_fs_net_get(){
+ if [ -z $1 ]
+ then
+ echo "Please provide a SHA512 hash"
+ exit 1
+ fi
+ echo $1 | grep "[0123456789abcdef]\{128\}" > /dev/null 2>&1
+ if [ $? -ne 0 ]
+ then
+ echo "Look, I asked for a SHA512 hash, please try again"
+ exit 1
+ fi
+ _ak_fs_net_cat $1 $2 > $3
+}
+
+function _ak_fs_from_map_net_get_original_hash(){
+ TEMPDIR=$(_ak_make_temp_directory)
+ cd $TEMPDIR
+ if _ak_fs_verify_input_is_hash "$1"
+ then
+ _ak_he_list_peers | while read peer
+ do
+ _ak_log_debug "Trying $(_ak_he_url_request_map_hash_from_peer ${peer} $1)..."
+ curl -s --connect-timeout 3 -o ${TEMPDIR}/peer_${peer}.reply $(_ak_he_url_request_map_hash_from_peer ${peer} $1)
+ done
+ find . -type f | while read reply
+ do
+ cat ${reply} | jq >/dev/null 2>&1
+ if [ $? -eq 0 ]
+ then
+ _ak_log_error "Found error in reply: $(cat ${reply} | jq -r '.error')"
+ else
+ if [ $(cat ${reply} | wc -l) -ne 2 ]
+ then
+ _ak_log_error "${reply} is not two lines long"
+ exit 1
+ else
+ _ak_log_info "${reply} is two lines long"
+ fi
+ if [ $(cat ${reply} | grep '^[0-9a-z]\{128\} ' | wc -l) -eq 2 ]
+ then
+ _ak_log_info "${reply} contains two hashes"
+ else
+ _ak_log_error "${reply} doesn't contain the appropriate info"
+ exit 1
+ fi
+ cat ${reply} | head -n 1 | awk '{print $1}'
+ break
+ fi
+ done
+ fi
+}
+
+function _ak_fs_from_map_net_get_original_filename(){
+ TEMPDIR=$(_ak_make_temp_directory)
+ cd $TEMPDIR
+ if _ak_fs_verify_input_is_hash "$1"
+ then
+ _ak_he_list_peers | while read peer
+ do
+ _ak_log_debug "Trying $(_ak_he_url_request_map_hash_from_peer ${peer} $1)..."
+ curl -s --connect-timeout 3 -o ${TEMPDIR}/peer_${peer}.reply $(_ak_he_url_request_map_hash_from_peer ${peer} $1)
+ done
+ find . -type f | while read reply
+ do
+ cat ${reply} | jq >/dev/null 2>&1
+ if [ $? -eq 0 ]
+ then
+ _ak_log_error "Found error in reply: $(cat ${reply} | jq -r '.error')"
+ else
+ if [ $(cat ${reply} | wc -l) -ne 2 ]
+ then
+ _ak_log_error "${reply} is not two lines long"
+ exit 1
+ else
+ _ak_log_info "${reply} is two lines long"
+ fi
+ if [ $(cat ${reply} | grep '^[0-9a-z]\{128\} ' | wc -l) -eq 2 ]
+ then
+ _ak_log_info "${reply} contains two hashes"
+ else
+ _ak_log_error "${reply} doesn't contain the appropriate info"
+ exit 1
+ fi
+ cat ${reply} | head -n 1 | awk '{print $2}'
+ break
+ fi
+ done
+ fi
+}
+
+function _ak_fs_from_map_net_get_root_hash(){
+ TEMPDIR=$(_ak_make_temp_directory)
+ cd $TEMPDIR
+ if _ak_fs_verify_input_is_hash "$1"
+ then
+ _ak_he_list_peers | while read peer
+ do
+ _ak_log_debug "Trying $(_ak_he_url_request_map_hash_from_peer ${peer} $1)..."
+ curl -s --connect-timeout 3 -o ${TEMPDIR}/peer_${peer}.reply $(_ak_he_url_request_map_hash_from_peer ${peer} $1)
+ done
+ find . -type f | while read reply
+ do
+ cat ${reply} | jq >/dev/null 2>&1
+ if [ $? -eq 0 ]
+ then
+ _ak_log_error "Found error in reply: $(cat ${reply} | jq -r '.error')"
+ else
+ if [ $(cat ${reply} | wc -l) -ne 2 ]
+ then
+ _ak_log_error "${reply} is not two lines long"
+ exit 1
+ else
+ _ak_log_info "${reply} is two lines long"
+ fi
+ if [ $(cat ${reply} | grep '^[0-9a-z]\{128\} ' | wc -l) -eq 2 ]
+ then
+ _ak_log_info "${reply} contains two hashes"
+ else
+ _ak_log_error "${reply} doesn't contain the appropriate info"
+ exit 1
+ fi
+ cat ${reply} | grep 'level.1.map' | awk '{print $1}'
+ break
+ fi
+ done
+ fi
+}
+
+function _ak_fs_net_get_from_map_hash(){
+ if _ak_fs_verify_input_is_hash "$1"
+ then
+        _ak_fs_net_get `_ak_fs_from_map_net_get_root_hash $1` `_ak_fs_from_map_net_get_original_hash $1` `_ak_fs_from_map_net_get_original_filename $1`
+ fi
+}
+
+function _ak_fs_net_cat_from_map_hash(){
+ if _ak_fs_verify_input_is_hash "$1"
+ then
+ _ak_fs_net_cat `_ak_fs_from_map_net_get_root_hash $1` `_ak_fs_from_map_net_get_original_hash $1`
+ fi
+}
+
+_ak_log_debug "_ak_fs loaded $(caller)"