Implemented a simple queue for hashes that were not successfully uploaded to a storage container

This commit is contained in:
Chris
2020-01-13 00:28:44 +01:00
parent 220a3103fa
commit 80e210af86
3 changed files with 84 additions and 5 deletions

View File

@@ -145,9 +145,12 @@ function storageControllerUpload($hash)
{
// Lets' check all storage controllers and tell them that a new file was uploaded
$sc = getStorageControllers();
$allgood = true;
$uploadedhash =$hash;
foreach($sc as $contr)
{
if((new $contr())->isEnabled()===true)
$controller = new $contr();
if($controller->isEnabled()===true)
{
$source = ROOT.DS.'data'.DS.$hash.DS.$hash;
if(defined('ENCRYPTION_KEY') && ENCRYPTION_KEY) //ok so we got an encryption key which means we'll store only the encrypted file
@@ -155,14 +158,44 @@ function storageControllerUpload($hash)
$enc = new Encryption;
$encoded_file = ROOT.DS.'tmp'.DS.$hash.'.enc';
$enc->encryptFile($source,$encoded_file,base64_decode(ENCRYPTION_KEY));
(new $contr())->pushFile($encoded_file,$hash.'.enc');
$controller->pushFile($encoded_file,$hash.'.enc');
unlink($encoded_file);
$uploadedhash = $hash.'.enc';
}
else // not encrypted
(new $contr())->pushFile($source,$hash);
$controller->pushFile($source,$hash);
//let's check if the file is really there. If not, queue it for later
if(!$controller->hashExists($uploadedhash))
{
$allgood = false;
$queuefile=ROOT.DS.'tmp'.DS.'controllerqueue.txt';
if(!file_exists($queuefile) || !stringInFile($hash,$queuefile))
{
$fp=fopen($queuefile,'a');
if($fp)
{
fwrite($fp,$hash."\n");
fclose($fp);
}
}
}
}
}
return $allgood;
}
// Returns true if $file contains a line that, after trimming whitespace,
// exactly matches $string; false otherwise (or if the file cannot be opened).
// Used to avoid queueing the same hash twice in controllerqueue.txt.
function stringInFile($string,$file)
{
	$handle = fopen($file, 'r');
	if($handle === false)
		return false; // unreadable file: treat as "not found" rather than fataling
	$found = false;
	while (($line = fgets($handle)) !== false) {
		if(trim($line)===$string)
		{
			$found = true;
			break;
		}
	}
	// BUGFIX: original returned true from inside the loop without closing
	// the handle, leaking the file descriptor on every hit.
	fclose($handle);
	return $found;
}
function getNewHash($type,$length=10)

View File

@@ -13,7 +13,7 @@ class S3Storage implements StorageController
{
private $s3;
function connect(){
require ROOT.DS.'storage-controllers'.DS.'s3'.DS.'aws-autoloader.php';
require_once(ROOT.DS.'storage-controllers'.DS.'s3'.DS.'aws-autoloader.php');
$this->s3 = new Aws\S3\S3Client([
'version' => 'latest',
'region' => 'us-east-1',

46
tools/cron.php Normal file
View File

@@ -0,0 +1,46 @@
<?php
// tools/cron.php — CLI entry point for maintenance tasks.
// Currently the only task is 'uploadqueue', which retries storage-controller
// uploads that failed earlier and were queued in tmp/controllerqueue.txt.
if(php_sapi_name() !== 'cli') exit('This script can only be called via CLI');
error_reporting(E_ALL & ~E_DEPRECATED & ~E_STRICT & ~E_NOTICE);
ini_set('memory_limit', -1);
define('DS', DIRECTORY_SEPARATOR);
define('ROOT', dirname(__FILE__).DS.'..');
include_once(ROOT.DS.'inc/config.inc.php');
include_once(ROOT.DS.'inc/core.php');
// Guard against a missing argument so the usage message is shown instead of
// triggering an undefined-index notice.
switch(isset($argv[1]) ? $argv[1] : '')
{
	case 'uploadqueue':
		uploadqueue();
		break;
	default:
		// BUGFIX: the original message advertised commands (15min, hourly,
		// daily, weekly) that are not implemented; list the real one.
		exit("[ERR] Command not found. Available commands are: uploadqueue");
}
// Retries storage-controller uploads for every hash queued in
// tmp/controllerqueue.txt. Hashes that still exist locally are re-uploaded via
// storageControllerUpload(); failures are written back to the queue file,
// successes (and hashes no longer present locally) are dropped from it.
function uploadqueue()
{
	$queuefile = ROOT.DS.'tmp'.DS.'controllerqueue.txt';
	if(!file_exists($queuefile))
		exit("[i] File does not exist (nothing to upload)\n");
	$queue = file($queuefile);
	if(count($queue)<1)
		exit("[i] Nothing to upload\n");
	$newqueue = array();
	foreach($queue as $hash)
	{
		// BUGFIX: the original trimmed $hash twice; once is enough. Also skip
		// blank lines so they are not treated as an empty hash.
		$hash = trim($hash);
		if($hash==='') continue;
		echo " [i] Checking $hash\n";
		if(isExistingHash($hash)) //check if hash is still on server
		{
			echo "	[$hash] still exists locally. Uploading.. ";
			$success = storageControllerUpload($hash); // and retry the upload
			echo ($success===true?' => SUCCESS. Removing from queue':'FAILED. Will be re-added to queue')."\n";
			if(!$success)
				$newqueue[]=$hash;
		}
	}
	file_put_contents($queuefile,implode("\n",$newqueue));
}