add fixed export
Some checks failed: continuous-integration/drone/push (build is failing)

Commit c6bc1776bd (parent bbc944273a) by Marko, 2023-12-13 12:53:31 +01:00
9 changed files with 47104 additions and 61 deletions

.ddev/providers/acquia.yaml (new executable file, 71 lines)

@@ -0,0 +1,71 @@
#ddev-generated
# Acquia provider configuration.
# To use this configuration,
# 1. Get your Acquia API token from your Account Settings->API Tokens.
# 2. Make sure your SSH key is authorized on your Acquia account at Account Settings->SSH Keys.
# 3. `ddev auth ssh` (this typically only needs to be done once per ddev session, not every pull).
# 4. Add / update the web_environment section in ~/.ddev/global_config.yaml
#    or your project config.yaml with the API keys:
#    ```yaml
#    web_environment:
#      - ACQUIA_API_KEY=xxxxxxxx
#      - ACQUIA_API_SECRET=xxxxx
#    ```
# 5. Add the ACQUIA_ENVIRONMENT_ID environment variable to your project config.yaml, for example:
#    ```yaml
#    web_environment:
#      - ACQUIA_ENVIRONMENT_ID=project1.dev
#    ```
#    On the Acquia Cloud Platform you can find this by navigating to the environments page,
#    clicking on the header, and looking for the "SSH URL" line.
#    E.g. `project1.dev@cool-projects.acquia-sites.com` would have an environment ID of `project1.dev`.
# 6. `ddev restart`
# 7. Use `ddev pull acquia` to pull the project database and files.
# 8. Optionally use `ddev push acquia` to push local files and database to Acquia. Note that `ddev push` is a command that can potentially damage your production site, so this is not recommended.
# Debugging: Use `ddev exec acli command` and `ddev exec acli auth:login`.
# Instead of setting the environment variables in configuration files, you can use
# `ddev pull acquia --environment=ACQUIA_ENVIRONMENT_ID=yourproject.dev`, for example.
auth_command:
  command: |
    set -eu -o pipefail
    if [ -z "${ACQUIA_API_KEY:-}" ] || [ -z "${ACQUIA_API_SECRET:-}" ]; then echo "Please make sure you have set ACQUIA_API_KEY and ACQUIA_API_SECRET in ~/.ddev/global_config.yaml" && exit 1; fi
    if [ -z "${ACQUIA_ENVIRONMENT_ID:-}" ] ; then echo "Please set ACQUIA_ENVIRONMENT_ID via config.yaml or with '--environment=ACQUIA_ENVIRONMENT_ID=xxx'" && exit 1; fi
    ssh-add -l >/dev/null || ( echo "Please 'ddev auth ssh' before running this command." && exit 1 )
    acli -n auth:login -n --key="${ACQUIA_API_KEY}" --secret="${ACQUIA_API_SECRET}"

db_pull_command:
  command: |
    set -eu -o pipefail
    # xargs here just trims whitespace
    # We could use an easier technique when https://github.com/acquia/cli/issues/1629 is resolved,
    # just using `acli pull:db ${ACQUIA_ENVIRONMENT_ID}`
    echo "Using ACQUIA_ENVIRONMENT_ID=${ACQUIA_ENVIRONMENT_ID}"
    # set -x   # You can enable bash debugging output by uncommenting
    db_dump=$(acli pull:db ${ACQUIA_ENVIRONMENT_ID} --no-interaction --no-import | tail -2l | xargs)
    ls /var/www/html/.ddev >/dev/null # This just refreshes stale NFS if possible
    cp ${db_dump} /var/www/html/.ddev/.downloads/db.sql.gz

files_import_command:
  command: |
    # set -x   # You can enable bash debugging output by uncommenting
    set -eu -o pipefail
    acli -n pull:files ${ACQUIA_ENVIRONMENT_ID}

# push is a dangerous command. If not absolutely needed it's better to delete these lines.
db_push_command:
  command: |
    set -eu -o pipefail
    export ACLI_DB_HOST=db ACLI_DB_NAME=db ACLI_DB_USER=db ACLI_DB_PASSWORD=db
    # set -x   # You can enable bash debugging output by uncommenting
    acli push:db ${ACQUIA_ENVIRONMENT_ID} --no-interaction

# push is a dangerous command. If not absolutely needed it's better to delete these lines.
files_push_command:
  command: |
    # set -x   # You can enable bash debugging output by uncommenting
    set -eu -o pipefail
    acli push:files ${ACQUIA_ENVIRONMENT_ID} --no-interaction
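For orientation, the numbered steps in the comment block above reduce to a short shell session. This is only a minimal sketch using the placeholder values from the comments; the `ddev config global --web-environment-add` shortcut is assumed here as an alternative to editing the YAML files by hand:

```bash
# Store the Acquia API credentials globally (placeholder values, not real keys):
ddev config global --web-environment-add="ACQUIA_API_KEY=xxxxxxxx,ACQUIA_API_SECRET=xxxxx"

# Point this project at an Acquia environment (placeholder ID):
ddev config --web-environment-add="ACQUIA_ENVIRONMENT_ID=project1.dev"

# Once per ddev session, make the SSH key available, then restart and pull:
ddev auth ssh
ddev restart
ddev pull acquia

# Or override the environment just for one pull:
ddev pull acquia --environment=ACQUIA_ENVIRONMENT_ID=project1.dev
```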

.ddev/providers/upsun.yaml (new executable file, 71 lines)

@@ -0,0 +1,71 @@
#ddev-generated
# Upsun provider configuration. This works out of the box, but can be edited to add
# your own preferences. If you edit it, remove the `ddev-generated` line from the top so
# that it won't be overwritten.
# To use this configuration,
# 1. Check out the site from Upsun and then configure it with `ddev config`. You'll want to use `ddev start` and make sure the basic functionality is working.
# 2. Obtain and configure an API token.
#    a. Log in to the Upsun Dashboard and go to My Profile->API Tokens to create an API token for DDEV to use.
#    b. Add the API token to the `web_environment` section in your global ddev configuration at ~/.ddev/global_config.yaml:
#    ```yaml
#    web_environment:
#      - UPSUN_CLI_TOKEN=abcdeyourtoken
#    ```
# 3. Add UPSUN_PROJECT and UPSUN_ENVIRONMENT variables to your project `.ddev/config.yaml` or a `.ddev/config.upsun.yaml`:
#    ```yaml
#    web_environment:
#      - UPSUN_PROJECT=nf4amudfn23biyourproject
#      - UPSUN_ENVIRONMENT=main
#    ```
# 4. `ddev restart`
# 5. Run `ddev pull upsun`. After you agree to the prompt, the current upstream database and files will be downloaded.
# 6. Optionally use `ddev push upsun` to push local files and database to Upsun. Note that `ddev push` is a command that can potentially damage your production site, so this is not recommended.
# Debugging: Use `ddev exec upsun` to see what the Upsun CLI knows about
# your configuration and whether it's working correctly.
auth_command:
  command: |
    set -eu -o pipefail
    if [ -z "${UPSUN_CLI_TOKEN:-}" ]; then echo "Please make sure you have set UPSUN_CLI_TOKEN." && exit 1; fi
    if [ -z "${UPSUN_PROJECT:-}" ]; then echo "Please make sure you have set UPSUN_PROJECT." && exit 1; fi
    if [ -z "${UPSUN_ENVIRONMENT:-}" ]; then echo "Please make sure you have set UPSUN_ENVIRONMENT." && exit 1; fi

db_pull_command:
  command: |
    # set -x   # You can enable bash debugging output by uncommenting
    set -eu -o pipefail
    export UPSUN_CLI_NO_INTERACTION=1
    ls /var/www/html/.ddev >/dev/null # This just refreshes stale NFS if possible
    upsun db:dump --yes --gzip --file=/var/www/html/.ddev/.downloads/db.sql.gz --project="${UPSUN_PROJECT}" --environment="${UPSUN_ENVIRONMENT}"

files_import_command:
  command: |
    # set -x   # You can enable bash debugging output by uncommenting
    set -eu -o pipefail
    export UPSUN_CLI_NO_INTERACTION=1
    # Use $UPSUN_MOUNTS if it exists to get list of mounts to download, otherwise just web/sites/default/files (drupal)
    declare -a mounts=(${UPSUN_MOUNTS:-/web/sites/default/files})
    upsun mount:download --all --yes --quiet --project="${UPSUN_PROJECT}" --environment="${UPSUN_ENVIRONMENT}" --target=/var/www/html

# push is a dangerous command. If not absolutely needed it's better to delete these lines.
db_push_command:
  command: |
    # set -x   # You can enable bash debugging output by uncommenting
    set -eu -o pipefail
    export UPSUN_CLI_NO_INTERACTION=1
    ls /var/www/html/.ddev >/dev/null # This just refreshes stale NFS if possible
    pushd /var/www/html/.ddev/.downloads >/dev/null
    gzip -dc db.sql.gz | upsun db:sql --project="${UPSUN_PROJECT}" --environment="${UPSUN_ENVIRONMENT}"

# push is a dangerous command. If not absolutely needed it's better to delete these lines.
# TODO: This is a naive, Drupal-centric push, which needs adjustment for the mount to be pushed.
files_push_command:
  command: |
    # set -x   # You can enable bash debugging output by uncommenting
    set -eu -o pipefail
    export UPSUN_CLI_NO_INTERACTION=1
    ls "${DDEV_FILES_DIR}" >/dev/null # This just refreshes stale NFS if possible
    upsun mount:upload --yes --quiet --project="${UPSUN_PROJECT}" --environment="${UPSUN_ENVIRONMENT}" --source="${DDEV_FILES_DIR}" --mount=web/sites/default/files
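As with the Acquia provider, the setup above can be exercised from the shell. A minimal sketch using the placeholder token, project, and environment from the comments (again assuming `--web-environment-add` rather than hand-editing the YAML):

```bash
# Global API token (placeholder value):
ddev config global --web-environment-add="UPSUN_CLI_TOKEN=abcdeyourtoken"

# Project and environment for this checkout (placeholders):
ddev config --web-environment-add="UPSUN_PROJECT=nf4amudfn23biyourproject,UPSUN_ENVIRONMENT=main"

ddev restart
ddev pull upsun   # downloads the upstream database and files after a confirmation prompt
ddev push upsun   # optional, and potentially destructive for the upstream site
```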

qodana.sarif.json (new file, 46,887 lines)

File diff suppressed because it is too large.


@@ -52,13 +52,15 @@ class HiltesImportCommand extends Command
         $io->success('Start Hiltes Import');
         $rootPath = $this->GetProjectRootDir();
+        $delta = $input->getOption('delta');
+
         /**
          * @var HiltesImport
          */
         $hiltesImport = new HiltesImport($this->productRepository, $this->warehouseRepository, $this->stockRepository, $this->logger, $rootPath);
-        $hiltesImport->startImport($input->getOption('delta'));
+        $hiltesImport->startImport($delta);

         if (isset($r['error'])) {
             $io->error($r['text']);
@@ -70,11 +72,10 @@ class HiltesImportCommand extends Command
         $jtl = new Jtl($this->productRepository, $this->warehouseRepository, $this->stockRepository, $this->logger, $rootPath);

         //Export für Standartlager
-        dump("Standard");
-        $jtl->createExportFile($jtl->getProducts(['1', '3', '5', '10']), 'standard');
-        dump("WMS");
+        $jtl->createExportFile($jtl->getProducts(['1', '3', '5', '10']), 'standard' . ($delta ? '_delta' : ''));
+
         //Export für WMS Lager
-        $jtl->createExportFile($jtl->getProducts(['8']), 'wms');
+        $jtl->createExportFile($jtl->getProducts(['8']), 'wms' . ($delta ? '_delta' : ''));

         $io->success('Done.');
         return Command::SUCCESS;


@@ -43,27 +43,45 @@ class JtlExportCommand extends Command
     protected function configure(): void
     {
-        $this
-            ->addArgument('arg1', InputArgument::OPTIONAL, 'Argument description')
-            ->addOption('option1', null, InputOption::VALUE_NONE, 'Option description');
+        $this->addOption('delta', 'd', InputOption::VALUE_NONE, 'Delta Import');
     }

     protected function execute(InputInterface $input, OutputInterface $output): int
     {
         $io = new SymfonyStyle($input, $output);
         $io->success('Start JTL Export');

+        $rootPath = $this->GetProjectRootDir();
+        $delta = $input->getOption('delta');
+
         /**
          * @var HiltesImport
          */
-        $jtl = new Jtl($this->productRepository, $this->warehouseRepository, $this->stockRepository, $this->logger);
-        $data = $jtl->getProducts();
-        $jtl->createExportFile($data);
+        $jtl = new Jtl($this->productRepository, $this->warehouseRepository, $this->stockRepository, $this->logger, $rootPath);
+
+        //Export für Standartlager
+        $jtl->createExportFile($jtl->getProducts(['1', '3', '5', '10']), 'standard' . ($delta ? '_delta' : ''));
+
+        //Export für WMS Lager
+        $jtl->createExportFile($jtl->getProducts(['8']), 'wms' . ($delta ? '_delta' : ''));

         $io->success('Ende JTL Export');
         return Command::SUCCESS;
     }
+
+    public static function GetProjectRootDir()
+    {
+        $dirFullPath = __DIR__;
+        //PRE: $dirs = /app/public/src/Helpers
+        $dirs = explode('/', $dirFullPath);
+        array_pop($dirs); //remove last element in array ('Helpers')
+        array_pop($dirs); //remove the next last element from array ('src')
+        //POST: $dirs = /app/public
+        return implode('/', $dirs);
+    }
 }
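Taken together with the HiltesImportCommand hunks above, the reworked `configure()` drops the `arg1`/`option1` stubs in favour of a single `--delta` (`-d`) flag, and the export now writes one CSV per warehouse group with a `_delta` suffix when that flag is set. A hedged usage sketch follows: the console command name `app:jtl-export` is an assumption (it is not visible in the diff), while the `<rootPath>/jtl/<name>.csv` output path follows from `Jtl::createExportFile()` further down.

```bash
# Full export: writes <rootPath>/jtl/standard.csv and <rootPath>/jtl/wms.csv
php bin/console app:jtl-export

# Delta export: writes <rootPath>/jtl/standard_delta.csv and <rootPath>/jtl/wms_delta.csv
php bin/console app:jtl-export --delta   # short form: -d
```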


@@ -56,7 +56,7 @@ class Ftp
             throw new Exception("Local file does not exist: $localFile");
         }

-        $r = ftp_nb_put($ftp, $this->getRemoteDir() . $remoteFile, $localFile, FTP_BINARY);
+        $r = @ftp_nb_put($ftp, $this->getRemoteDir() . $remoteFile, $localFile, FTP_BINARY);

         while ($r == FTP_MOREDATA) {
             // Continue uploading...


@@ -243,6 +243,14 @@ class Hiltes
         }
     }

+    /**
+     * Sends a request to the specified URL using CURL.
+     *
+     * @param string $url The URL to send the request to.
+     * @param mixed $param The data to send with the request. It should be an associative array.
+     * @param bool $auth (Optional) Indicates whether to include authorization in the request headers. Default is false.
+     * @return mixed The response from the server.
+     */
     public function sendToHiltes($url, $param, $auth = false)
     {
         $ch = curl_init($url);

@@ -265,6 +273,12 @@ class Hiltes
         return $result;
     }

+    /**
+     * Creates a JSON object based on the given data.
+     *
+     * @param mixed $data The data to be converted to JSON.
+     * @return array The JSON object.
+     */
     public function createJson($data)
     {
         $arr = array(


@@ -19,7 +19,7 @@ use Symfony\Component\Finder\Finder;
 class HiltesImport
 {
     protected $currentDirPath;
-    protected $cachedWarehouseIds;
+    protected $cachedWarehouse;
     protected $arrData = array();
     private $productRepository;
     private $stockRepository;
@@ -29,6 +29,8 @@ class HiltesImport
     private $cachedStockIds;
     private $rootPath;

+    private $deleteFiles = false;
+
     public function __construct(ProductRepository $productRepository, WarehouseRepository $warehouseRepository, StockRepository $stockRepository, LoggerInterface $logger, string $rootPath)
     {
         $this->productRepository = $productRepository;
@@ -129,8 +131,10 @@ class HiltesImport
             $this->logger->error($e->getMessage());
         }

+        if ($this->deleteFiles) {
             unlink($srcFile);
             unlink($srcFile . '.Ende');
+        }

         $this->logger->info($count . ' Datensätze importiert');
@@ -151,7 +155,7 @@ class HiltesImport
      * @param array $arr
      * @return void
      */
-    protected function trimArray(array &$arr): void
+    private function trimArray(array &$arr): void
     {
         foreach ($arr as $k => $v) {
             $arr[$k] = trim($v);
@@ -160,11 +164,16 @@ class HiltesImport
     /**
      * @param array $data
-     * @return void
+     * @return false
      */
-    protected function saveData(array $data): void
+    protected function saveData(array $data): bool
     {
-        $warehouse = $this->checkWarehouseName($data[3]);
+        if (!isset($data[3])) {
+            $this->logger->error('No Warehouse' . $data[3]);
+            return false;
+        }
+
+        $warehouse = $this->checkWarehouseName(trim($data[3]));
         $gtin = $this->checkProduct(substr($data[0], 1));

         if (!empty($warehouse) && !empty($this->cachedStockIds[$gtin][$warehouse->getId()])) {
@@ -177,33 +186,35 @@ class HiltesImport
         $stock->setInstock((int)$data[1] / 100);
         $this->stockRepository->save($stock, true);
+
+        return true;
     }

     /**
      * @param string $warehouseName
-     * @return Warehouse|false|mixed|null
+     * @return int
      */
     private function checkWarehouseName(string $warehouseName)
     {
         $warehouseName = ltrim($warehouseName, 0);

-        if (empty($this->cachedWarehouseIds[$warehouseName])) {
-            $warehouse = $this->warehouseRepository->findOneBy(['id' => (int)$warehouseName]);
+        if (empty($this->cachedWarehouse[$warehouseName])) {
+            $warehouse = $this->warehouseRepository->findOneBy(['name' => $warehouseName]);

+            //Wenn kein Lager gefunden wurde, dann lege es an
             if (empty($warehouse)) {
                 $warehouse = new Warehouse();
-                $warehouse->setId((int)$warehouseName);
+                //$warehouse->setId((int)$warehouseName);
                 $warehouse->setName($warehouseName);
-                $this->warehouseRepository->save($warehouse, true);
-            }
-            $this->cachedWarehouseIds[$warehouseName] = $warehouse;
-        }
+                $warehouseId = $this->warehouseRepository->save($warehouse, true);
+                $newWarehouse = $this->warehouseRepository->findOneBy(['id' => $warehouseId]);

-        if (!empty($this->cachedWarehouseIds[$warehouseName])) {
-            return $this->cachedWarehouseIds[$warehouseName];
+                $this->cachedWarehouse[$warehouseName] = $newWarehouse;
+            } else {
+                $this->cachedWarehouse[$warehouseName] = $warehouse;
             }
+        }

-        return false;
+        return $this->cachedWarehouse[$warehouseName];
     }

     /**


@@ -102,34 +102,6 @@ class Jtl
             }
         }

-        // foreach ($r as $product) {
-        //
-        //     $stock = $this->stockRepository->findBy(['product_id' => $product->getId()]);
-        //
-        //     if ($stock) {
-        //         foreach ($stock as $s) {
-        //             $warehouse = $s->getWarehouse();
-        //             $warehouseName = $warehouse->getName();
-        //
-        //             $data[$product->getId() . $warehouseName] = [
-        //                 'gtin' => $product->getGtin(),
-        //                 'stock' => $s->getInstock(),
-        //                 'warehouse' => $this->arrLager[$warehouseName] ?? 'Lager ' . $warehouseName
-        //             ];
-        //
-        //         }
-        //     } else {
-        //         $data[$product->getId()] = [
-        //             'gtin' => $product->getGtin(),
-        //             'stock' => 0,
-        //             'warehouse' => 0
-        //         ];
-        //
-        //         $this->logger->info('No stock for product ' . $product->getId());
-        //     }
-        // }
-
         return $data;
     }

@@ -145,8 +117,6 @@ class Jtl
         $file = $this->rootPath . '/jtl/' . $warehouse . '.csv';

-        dump($file);
-
         $writer = Writer::createFromPath($file, 'w+');
         $bytes = $writer->insertAll(new ArrayIterator($data));