diff --git a/.github/workflows/phpcs.yml b/.github/workflows/phpcs.yml new file mode 100644 index 000000000..5577844b6 --- /dev/null +++ b/.github/workflows/phpcs.yml @@ -0,0 +1,49 @@ +name: PHP CodeSniffer + +on: + push: + branches: [ trunk ] + pull_request: + +jobs: + phpcs: + runs-on: ubuntu-latest + name: PHP CodeSniffer + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Setup PHP + uses: shivammathur/setup-php@v2 + with: + php-version: '8.1' + extensions: mbstring, json + coverage: none + tools: composer:v2 + ini-values: memory_limit=1G + + - name: Get composer cache directory + id: composer-cache + run: echo "dir=$(composer config cache-files-dir)" >> $GITHUB_OUTPUT + + - name: Cache composer dependencies + uses: actions/cache@v3 + with: + path: ${{ steps.composer-cache.outputs.dir }} + key: ${{ runner.os }}-composer-${{ hashFiles('**/composer.lock') }} + restore-keys: ${{ runner.os }}-composer- + + - name: Install composer dependencies + run: composer install --prefer-dist --no-progress --no-dev --optimize-autoloader + + - name: Install dev dependencies for PHPCS + run: composer install --prefer-dist --no-progress --optimize-autoloader + + - name: Run PHP CodeSniffer + run: php vendor/bin/phpcs -d memory_limit=1G . -n + + - name: Annotate PHPCS results + if: failure() + run: | + echo "::error::PHP CodeSniffer found coding standard violations. Please run 'composer run-script lint-fix' to auto-fix issues where possible, or manually fix the remaining issues." 
diff --git a/bin/build-phar/DataLiberationBoxCompactor.php b/bin/build-phar/DataLiberationBoxCompactor.php index 836504b64..36c0076b2 100644 --- a/bin/build-phar/DataLiberationBoxCompactor.php +++ b/bin/build-phar/DataLiberationBoxCompactor.php @@ -2,31 +2,30 @@ use KevinGH\Box\Compactor\Compactor; -class DataLiberationBoxCompactor implements Compactor -{ - /** - * {@inheritdoc} - */ - public function compact(string $file, string $contents): string - { - if (!preg_match('/\.(php|json|lock)$/', $file)) { - return ''; - } +class DataLiberationBoxCompactor implements Compactor { - if ( - str_contains($file, 'platform_check.php') || - str_contains($file, '/tests/') || - str_contains($file, '/.git/') || - str_contains($file, '/.github/') || - str_contains($file, '/bin/') - ) { - return ''; - } + /** + * {@inheritdoc} + */ + public function compact( string $file, string $contents ): string { + if ( ! preg_match( '/\.(php|json|lock)$/', $file ) ) { + return ''; + } - if( str_contains($contents, 'Your Composer dependencies require ') ) { - return ''; - } + if ( + str_contains( $file, 'platform_check.php' ) || + str_contains( $file, '/tests/' ) || + str_contains( $file, '/.git/' ) || + str_contains( $file, '/.github/' ) || + str_contains( $file, '/bin/' ) + ) { + return ''; + } - return $contents; - } -} \ No newline at end of file + if ( str_contains( $contents, 'Your Composer dependencies require ' ) ) { + return ''; + } + + return $contents; + } +} diff --git a/bin/build-phar/box.php b/bin/build-phar/box.php index 5d48fec2d..e2e164c53 100644 --- a/bin/build-phar/box.php +++ b/bin/build-phar/box.php @@ -1,6 +1,6 @@ __DIR__ . '/uploads', - 'new_site_url' => 'https://smoke-test.org', - 'new_site_content_root_url' => 'https://smoke-test.org', - 'new_media_root_url' => 'https://smoke-test.org', -]); +$c = WordPress\DataLiberation\Importer\StreamImporter::create_for_wxr_file( + __DIR__ . '/nosuchfile.xml', + array( + 'uploads_path' => __DIR__ . 
'/uploads', + 'new_site_url' => 'https://smoke-test.org', + 'new_site_content_root_url' => 'https://smoke-test.org', + 'new_media_root_url' => 'https://smoke-test.org', + ) +); -WordPress\DataLiberation\URL\WPURL::parse('https://example.com'); +WordPress\DataLiberation\URL\WPURL::parse( 'https://example.com' ); echo 'Stream importer created!'; - diff --git a/bin/build-phar/truncate-composer-checks.php b/bin/build-phar/truncate-composer-checks.php index ecd4d1822..1e2322892 100644 --- a/bin/build-phar/truncate-composer-checks.php +++ b/bin/build-phar/truncate-composer-checks.php @@ -1,5 +1,4 @@ startBuffering(); -$phar['.box/bin/check-requirements.php'] = ''; +$phar['.box/bin/check-requirements.php'] = ''; $phar['vendor/composer/platform_check.php'] = ''; // Set to empty string to truncate $phar->stopBuffering(); - - diff --git a/components/ByteStream/ReadStream/class-basebytereadstream.php b/components/ByteStream/ReadStream/class-basebytereadstream.php index 1f4dfc7d0..475607bdd 100644 --- a/components/ByteStream/ReadStream/class-basebytereadstream.php +++ b/components/ByteStream/ReadStream/class-basebytereadstream.php @@ -24,7 +24,7 @@ abstract class BaseByteReadStream implements ByteReadStream { * fox jumps over the lazy dog. * ^--^ * consumed but retained for seek()-ing backwards. - * + * * @var int */ protected $max_lookbehind_bytes = 2048; diff --git a/composer.json b/composer.json index badedd703..23503b1ae 100644 --- a/composer.json +++ b/composer.json @@ -80,8 +80,8 @@ "build-blueprints-phar": "box compile -c phar-blueprints.json", "regenerate-json-schema": "node components/Blueprints/Versions/Version2/json-schema/regenerate-schema.ts", "test": "phpunit -c phpunit.xml", - "lint": "phpcs .", - "lint-fix": "phpcbf ." + "lint": "phpcs -d memory_limit=1G .", + "lint-fix": "phpcbf -d memory_limit=1G ." 
}, "repositories": [ { diff --git a/file.php b/file.php deleted file mode 100644 index fc44f468e..000000000 --- a/file.php +++ /dev/null @@ -1,22 +0,0 @@ -get_stylesheet(), 'wp_theme' ); -if ( ! $term ) { - $term = wp_insert_term( $theme->get_stylesheet(), 'wp_theme' ); - $term_id = $term['term_id']; -} else { - $term_id = $term->term_id; -} -$post_id = wp_insert_post( - array( - 'post_type' => 'wp_template_part', - 'post_title' => '" + checkbox.dataset.post_title.replace( /' / g, - "\\'", - ) + "', 'post_name' => '" + checkbox . dataset . post_name . replace( /'/g, "\\'" ) + "', 'post_content' => '" + checkbox.dataset.post_content.replace( /'/g, "\\'" ).replace( /\\n/g, "\n" ) + "', 'post_status' => 'publish' ) -); - -wp_set_object_terms( $post_id, - $term_id, 'wp_theme' );", -} ); diff --git a/phpcs.xml b/phpcs.xml index 24974dd2d..5d7fc9e63 100644 --- a/phpcs.xml +++ b/phpcs.xml @@ -5,6 +5,10 @@ vendor/* */vendor-patched/* */Tests/* + plugins/* + examples/* + rector.php + components/CORSProxy/cors-proxy-functions.php diff --git a/plugins/data-liberation/import-screen.js b/plugins/data-liberation/import-screen.js index 83a316629..26a8d66f5 100644 --- a/plugins/data-liberation/import-screen.js +++ b/plugins/data-liberation/import-screen.js @@ -7,151 +7,164 @@ import { store, getContext, getServerContext } from '@wordpress/interactivity'; const apiFetch = window.wp.apiFetch; -const { state, actions } = store('dataLiberation', { - state: { - get isImportTypeSelected() { - return getContext()?.importType === state.selectedImportType; +const { state, actions } = store( + 'dataLiberation', + { + state: { + get isImportTypeSelected() { + return getContext() ? .importType === state.selectedImportType; + }, + get frontloadingFailed() { + return getContext() ? .item.post_status === 'error'; + }, + get isCurrentImportAtStage() { + return getContext() ? 
.stage.id === state.currentImport.stage; + }, }, - get frontloadingFailed() { - return getContext()?.item.post_status === 'error'; - }, - get isCurrentImportAtStage() { - return getContext()?.stage.id === state.currentImport.stage; - }, - }, - /** - * We're bombarding the server with HTTP requests both to run the import and to - * refresh the reported progress. Do we need such an aggressive refresh rate? - */ - callbacks: { /** - * Fetches a fresh interactivity state from the server every second. - * - * @TODO: Get rid of the interactivity-state API endpoint. - * Let's use the iAPI router via data-wp-router-region and actions.navigate(). - * If it's the same url you have to use actions.navigate(url, { force: true }) - * to clear the cache. - * See https://github.com/WordPress/gutenberg/blob/trunk/packages/interactivity-router/README.md + * We're bombarding the server with HTTP requests both to run the import and to + * refresh the reported progress. Do we need such an aggressive refresh rate? */ - async startRefreshingProgress() { - while (true) { - if ( - !state.currentImport.active || + callbacks: { + /** + * Fetches a fresh interactivity state from the server every second. + * + * @TODO: Get rid of the interactivity-state API endpoint. + * Let's use the iAPI router via data-wp-router-region and actions.navigate(). + * If it's the same url you have to use actions.navigate(url, { force: true }) + * to clear the cache. + * See https://github.com/WordPress/gutenberg/blob/trunk/packages/interactivity-router/README.md + */ + async startRefreshingProgress() { + while (true) { + if ( + ! 
state.currentImport.active || state.currentImport.stage === '#finished' - ) { - await new Promise((resolve) => setTimeout(resolve, 1000)); - continue; - } - - const response = await apiFetch({ - path: '/data-liberation/v1/interactivity-state', - }); - Object.assign(state, response); - - await new Promise((resolve) => setTimeout(resolve, 2000)); - } - }, - /** - * Continuously asks the server to continue the import. - * - * @TODO: Ensure two parallel requests are never processing the same import. - * That would lead to race conditions, undefined states, bad stuff in general. - */ - async startImportLoop() { - let failuresInARow = 0; - const maxFailures = 3; - while (true) { - if ( - !state.currentImport.active || - state.currentImport.stage === '#finished' - ) { - await new Promise((resolve) => setTimeout(resolve, 1000)); - continue; - } + ) { + await new Promise( (resolve) => setTimeout( resolve, 1000 ) ); + continue; + } - try { - const response = await fetch( - `${window.location.pathname}?page=data-liberation&continue=true`, + const response = await apiFetch( { - credentials: 'same-origin', // Preserves cookies + path: '/data-liberation/v1/interactivity-state', } ); - const text = await response.text(); - const parser = new DOMParser(); - const doc = parser.parseFromString(text, 'text/html'); - const importOutput = doc.querySelector('#import-output'); - if (importOutput) { - document.querySelector('#import-output').innerHTML = - importOutput.innerHTML; + Object.assign( state, response ); + + await new Promise( (resolve) => setTimeout( resolve, 2000 ) ); + } + }, + /** + * Continuously asks the server to continue the import. + * + * @TODO: Ensure two parallel requests are never processing the same import. + * That would lead to race conditions, undefined states, bad stuff in general. + */ + async startImportLoop() { + let failuresInARow = 0; + const maxFailures = 3; + while (true) { + if ( + ! 
state.currentImport.active || + state.currentImport.stage === '#finished' + ) { + await new Promise( (resolve) => setTimeout( resolve, 1000 ) ); + continue; + } + + try { + const response = await fetch( + `${window.location.pathname}?page=data-liberation&continue=true`, + { + credentials: 'same-origin', // Preserves cookies + } + ); + const text = await response.text(); + const parser = new DOMParser(); + const doc = parser.parseFromString( text, 'text/html' ); + const importOutput = doc.querySelector( '#import-output' ); + if (importOutput) { + document.querySelector( '#import-output' ).innerHTML = + importOutput.innerHTML; + } + failuresInARow = 0; + } catch (error) { + failuresInARow++; + // TODO: notify the user about the problem. + if (failuresInARow >= maxFailures) { + throw error; + } + } + await new Promise( (resolve) => setTimeout( resolve, 1000 ) ); + } + }, + }, + actions: { + setImportType: () => { + if (getContext()) { + state.selectedImportType = getContext().importType; + } + }, + + async archiveImport() { + window.location.href = `${window.location.pathname}?page=data-liberation&archive=true`;
+ }, - // Existing download management actions - async retryDownload(event) { - const postId = event.target.dataset.postId; - const response = await apiFetch({ - path: '/data-liberation/v1/retry-download', - method: 'POST', - data: { post_id: postId }, - }); + + // Existing download management actions + async retryDownload( event ) { + const postId = event.target.dataset.postId; + const response = await apiFetch( + { + path: '/data-liberation/v1/retry-download', + method: 'POST', + data: { post_id: postId }, + } + ); if (response.success) { window.location.reload(); } + }, - async ignoreDownload(event) { - const postId = event.target.dataset.postId; - const response = await apiFetch({ - path: '/data-liberation/v1/ignore-download', - method: 'POST', - data: { post_id: postId }, - }); + + async ignoreDownload( event ) { + const postId = event.target.dataset.postId; + const response = await apiFetch( + { + path: '/data-liberation/v1/ignore-download', + method: 'POST', + data: { post_id: postId }, + } + ); if (response.success) { window.location.reload(); } + }, - async changeDownloadUrl(event) { - const postId = event.target.dataset.postId; - const newUrl = prompt('Enter the new URL for this asset:'); + + async changeDownloadUrl( event ) { + const postId = event.target.dataset.postId; + const newUrl = prompt( 'Enter the new URL for this asset:' ); - if (!newUrl) return; + + if ( !
newUrl) { + return; + } - const response = await apiFetch({ - path: '/data-liberation/v1/change-download-url', - method: 'POST', - data: { - post_id: postId, - new_url: newUrl, - }, - }); + const response = await apiFetch( + { + path: '/data-liberation/v1/change-download-url', + method: 'POST', + data: { + post_id: postId, + new_url: newUrl, + }, + } + ); if (response.success) { window.location.reload(); } + }, }, - }, -}); + } +); diff --git a/plugins/data-liberation/plugin.php b/plugins/data-liberation/plugin.php index 038df052a..58c613c99 100644 --- a/plugins/data-liberation/plugin.php +++ b/plugins/data-liberation/plugin.php @@ -19,8 +19,8 @@ use WordPress\HttpClient\Request; use WordPress\Markdown\MarkdownImporter; -if(file_exists(__DIR__ . '/php-toolkit.phar')) { - // Production – built and installed plugin +if ( file_exists( __DIR__ . '/php-toolkit.phar' ) ) { + // Production – built and installed plugin require_once __DIR__ . '/php-toolkit.phar'; } else { // Development – plugin mounted in WordPress via Playground CLI mounts @@ -31,7 +31,6 @@ /** * Don't run KSES on the attribute values during the import. * - * * Without this filter, WP_HTML_Tag_Processor::set_attribute() will * assume the value is a URL and run KSES on it, which will incorrectly * prefix relative paths with http://. @@ -120,17 +119,20 @@ function () { * to the remote server via the CORS proxy. This is useful for cloning private * Git repositories. */ -add_filter('wp_http_client_request', function (Request $request) { - if(isset($request->headers['x-cors-proxy-allowed-request-headers'])) { - $prefix = $request->headers['x-cors-proxy-allowed-request-headers'] . ','; - } else { - $prefix = ''; - } - if(str_contains($request->url, '.git/')) { - $request->headers['x-cors-proxy-allowed-request-headers'] = $prefix . 
'Authorization'; - } - return $request; -}); +add_filter( + 'wp_http_client_request', + function ( Request $request ) { + if ( isset( $request->headers['x-cors-proxy-allowed-request-headers'] ) ) { + $prefix = $request->headers['x-cors-proxy-allowed-request-headers'] . ','; + } else { + $prefix = ''; + } + if ( str_contains( $request->url, '.git/' ) ) { + $request->headers['x-cors-proxy-allowed-request-headers'] = $prefix . 'Authorization'; + } + return $request; + } +); // Register admin menu add_action( @@ -238,7 +240,7 @@ function data_liberation_admin_page() { array( 'frontloadingFailed' => function () { $context = wp_interactivity_get_context(); - return $context['item']->post_status === 'error'; + return 'error' === $context['item']->post_status; }, 'isCurrentImportAtStage' => function () { @@ -337,7 +339,7 @@ function () { if ( empty( $_FILES['markdown_zip']['tmp_name'] ) ) { wp_die( 'Please select a file to upload' ); } - if ( $_FILES['markdown_zip']['type'] !== 'application/zip' ) { + if ( 'application/zip' !== $_FILES['markdown_zip']['type'] ) { wp_die( 'Invalid file type' ); } $attachment_id = media_handle_upload( 'markdown_zip', 0 ); @@ -360,7 +362,7 @@ function () { if ( false === $import_session ) { // @TODO: More user friendly error message – maybe redirect back to the import screen and - // show the error there. + // show the error there. wp_die( 'Failed to create an import session' ); } @@ -371,10 +373,10 @@ function () { * @TODO: The schedule doesn't seem to be actually running. */ // if(is_wp_error(wp_schedule_event(time(), 'data_liberation_minute', 'data_liberation_process_import'))) { - // wp_delete_attachment($attachment_id, true); - // // @TODO: More user friendly error message – maybe redirect back to the import screen and - // // show the error there. 
- // wp_die('Failed to schedule import – the "data_liberation_minute" schedule may not be registered.'); + // wp_delete_attachment($attachment_id, true); + // @TODO: More user friendly error message – maybe redirect back to the import screen and + // show the error there. + // wp_die('Failed to schedule import – the "data_liberation_minute" schedule may not be registered.'); // } wp_redirect( @@ -405,9 +407,9 @@ function data_liberation_process_import() { function data_liberation_import_step( $session, $importer = null ) { $metadata = $session->get_metadata(); - if(!$importer) { - $importer = data_liberation_create_importer( $metadata ); - } + if ( ! $importer ) { + $importer = data_liberation_create_importer( $metadata ); + } if ( ! $importer ) { return; } @@ -427,7 +429,7 @@ function data_liberation_import_step( $session, $importer = null ) { // we need to give it more time. Otherwise every time we retry, // we'll start from the beginning and never advance past the // frontloading stage. - if ( $importer->get_stage() === StreamImporter::STAGE_FRONTLOAD_ASSETS ) { + if ( StreamImporter::STAGE_FRONTLOAD_ASSETS === $importer->get_stage() ) { if ( $fetched_files > 0 ) { break; } @@ -439,7 +441,7 @@ function data_liberation_import_step( $session, $importer = null ) { // No negotiation, we're done. // @TODO: Make it easily configurable // @TODO: Bump the number of download attempts for the placeholders, - // set the status to `error` in each interrupted download. + // set the status to `error` in each interrupted download. break; } @@ -449,7 +451,7 @@ function data_liberation_import_step( $session, $importer = null ) { $should_advance_to_next_stage = null !== $importer->get_next_stage(); if ( $should_advance_to_next_stage ) { if ( StreamImporter::STAGE_FRONTLOAD_ASSETS === $importer->get_stage() ) { - $resolved_all_failures = $session->count_unfinished_frontloading_stubs() === 0; + $resolved_all_failures = 0 === $session->count_unfinished_frontloading_stubs(); if ( ! 
$resolved_all_failures ) { break; } @@ -521,7 +523,7 @@ function data_liberation_create_importer( $import ) { if ( ! file_exists( $temp_dir ) ) { mkdir( $temp_dir, 0777, true ); $zip = new ZipArchive(); - if ( $zip->open( $zip_path ) === true ) { + if ( true === $zip->open( $zip_path ) ) { $zip->extractTo( $temp_dir ); $zip->close(); } else { @@ -583,7 +585,7 @@ function ( $post ) { ); } - $frontloading_progress = array_map( + $frontloading_progress = array_map( function ( $progress, $url ) { $progress['url'] = $url; return $progress; @@ -591,7 +593,7 @@ function ( $progress, $url ) { $import_session ? $import_session->get_frontloading_progress() : array(), array_keys( $import_session ? $import_session->get_frontloading_progress() : array() ) ); - $frontloading_stubs = $import_session ? $import_session->get_frontloading_stubs() : array(); + $frontloading_stubs = $import_session ? $import_session->get_frontloading_stubs() : array(); return array( // Current import state: 'currentImport' => $import_session diff --git a/plugins/git-repo/git-repo.php b/plugins/git-repo/git-repo.php index 2b26c20c8..362e6e98a 100644 --- a/plugins/git-repo/git-repo.php +++ b/plugins/git-repo/git-repo.php @@ -20,155 +20,165 @@ use function WordPress\Filesystem\wp_unix_path_resolve_dots; $git_repo_path = __DIR__ . '/git-test-server-data'; -if(!is_dir($git_repo_path)) { - mkdir($git_repo_path, 0777, true); +if ( ! 
is_dir( $git_repo_path ) ) { + mkdir( $git_repo_path, 0777, true ); } -$fs = LocalFilesystem::create($git_repo_path); -$repository = new GitRepository($fs); -$git_fs = GitFilesystem::create($repository); +$fs = LocalFilesystem::create( $git_repo_path ); +$repository = new GitRepository( $fs ); +$git_fs = GitFilesystem::create( $repository ); $server = new GitEndpoint( - $repository, - [ - 'root' => GIT_DIRECTORY_ROOT, - ] + $repository, + array( + 'root' => GIT_DIRECTORY_ROOT, + ) ); -$request_bytes = file_get_contents('php://input'); +$request_bytes = file_get_contents( 'php://input' ); // $response = new StreamingResponseWriter(); $response = new BufferingResponseWriter(); -$query_string = $_SERVER['REQUEST_URI'] ?? ""; -$request_path = substr($query_string, strlen($_SERVER['PHP_SELF'])); -if($request_path[0] === '?') { - $request_path = substr($request_path, 1); - $request_path = preg_replace('/&(amp;)?/', '?', $request_path, 1); +$query_string = $_SERVER['REQUEST_URI'] ?? ''; +$request_path = substr( $query_string, strlen( $_SERVER['PHP_SELF'] ) ); +if ( '?' === $request_path[0] ) { + $request_path = substr( $request_path, 1 ); + $request_path = preg_replace( '/&(amp;)?/', '?', $request_path, 1 ); } // Before handling the request, commit all the pages to the git repo -$synced_post_types = [ - 'page', - 'post', - 'local_file', -]; -switch($request_path) { - // ls refs – protocol discovery - case '/info/refs?service=git-upload-pack': - // ls refs or fetch – smart protocol - case '/git-upload-pack': - // @TODO: Do streaming and amend the commit every few changes - // @TODO: Use the streaming exporter instead of the ad-hoc loop below - $diff = [ - 'updates' => [], - 'deletes' => [], - ]; - foreach($synced_post_types as $post_type) { - $pages = get_posts([ - 'post_type' => $post_type, - 'post_status' => 'publish', - ]); - foreach($pages as $page) { - $file_path = '/' . ltrim(wp_unix_path_resolve_dots($post_type . '/' . $page->post_name . 
'.html'), '/'); - $metadata = []; - foreach(['post_date_gmt', 'post_title', 'menu_order'] as $key) { - $metadata[$key] = get_post_field($key, $page->ID); - } - - $converter = new AnnotatedBlockMarkupProducer( - new BlocksWithMetadata($page->post_content, $metadata) - ); - $block_markup = $converter->produce(); - if (!$block_markup) { - throw new Exception('Failed to convert the post to HTML'); - } - // @TODO: Run the Markdown or block markup exporter - $diff['updates'][$file_path] = $block_markup; - } - $visitor = new FilesystemVisitor( - new ChrootLayer($git_fs, $post_type) - ); - while($visitor->next()) { - $event = $visitor->get_event(); - if($event->is_entering()) { - foreach($event->files as $file_name) { - $path = '/' . ltrim(wp_unix_path_resolve_dots($post_type . '/' . $event->dir . '/' . $file_name), '/'); - if(!isset($diff['updates'][$path])) { - $diff['deletes'][] = $path; - } - } - } - } - } - if(!$repository->commit($diff)) { - throw new Exception('Failed to commit changes'); - } - break; +$synced_post_types = array( + 'page', + 'post', + 'local_file', +); +switch ( $request_path ) { + // ls refs – protocol discovery + case '/info/refs?service=git-upload-pack': + // ls refs or fetch – smart protocol + case '/git-upload-pack': + // @TODO: Do streaming and amend the commit every few changes + // @TODO: Use the streaming exporter instead of the ad-hoc loop below + $diff = array( + 'updates' => array(), + 'deletes' => array(), + ); + foreach ( $synced_post_types as $post_type ) { + $pages = get_posts( + array( + 'post_type' => $post_type, + 'post_status' => 'publish', + ) + ); + foreach ( $pages as $page ) { + $file_path = '/' . ltrim( wp_unix_path_resolve_dots( $post_type . '/' . $page->post_name . 
'.html' ), '/' ); + $metadata = array(); + foreach ( array( 'post_date_gmt', 'post_title', 'menu_order' ) as $key ) { + $metadata[ $key ] = get_post_field( $key, $page->ID ); + } + + $converter = new AnnotatedBlockMarkupProducer( + new BlocksWithMetadata( $page->post_content, $metadata ) + ); + $block_markup = $converter->produce(); + if ( ! $block_markup ) { + throw new Exception( 'Failed to convert the post to HTML' ); + } + // @TODO: Run the Markdown or block markup exporter + $diff['updates'][ $file_path ] = $block_markup; + } + $visitor = new FilesystemVisitor( + new ChrootLayer( $git_fs, $post_type ) + ); + while ( $visitor->next() ) { + $event = $visitor->get_event(); + if ( $event->is_entering() ) { + foreach ( $event->files as $file_name ) { + $path = '/' . ltrim( wp_unix_path_resolve_dots( $post_type . '/' . $event->dir . '/' . $file_name ), '/' ); + if ( ! isset( $diff['updates'][ $path ] ) ) { + $diff['deletes'][] = $path; + } + } + } + } + } + if ( ! $repository->commit( $diff ) ) { + throw new Exception( 'Failed to commit changes' ); + } + break; } -$server->handle_request($request_path, $request_bytes, $response); +$server->handle_request( $request_path, $request_bytes, $response ); // @TODO: Support the use-case below in the streaming importer // @TODO: When a page is moved, don't delete the old page and create a new one but -// rather update the existing page. -if($request_path === '/git-receive-pack') { - // throw new Exception("test"); - foreach($synced_post_types as $post_type) { - $updated_ids = []; - foreach($git_fs->ls($post_type) as $file_name) { - $file_path = $post_type . '/' . 
$file_name; - $converter = new AnnotatedBlockMarkupConsumer( - $git_fs->get_contents($file_path) - ); - $result = $converter->consume(); - - $existing_posts = get_posts([ - 'post_type' => $post_type, - 'meta_key' => 'local_file_path', - 'meta_value' => $file_path, - ]); - - $filename_without_extension = pathinfo($file_name, PATHINFO_FILENAME); - - if($existing_posts) { - $post_id = $existing_posts[0]->ID; - } else { - $post_id = wp_insert_post([ - 'post_type' => $post_type, - 'post_status' => 'publish', - 'post_title' => $filename_without_extension, - 'meta_input' => [ - 'local_file_path' => $file_path, - ], - ]); - } - $updated_ids[] = $post_id; - - $metadata = $result->get_all_metadata(['first_value_only' => true]); - $updated = wp_update_post(array( - 'ID' => $post_id, - 'post_name' => $filename_without_extension, - 'post_content' => $result->get_block_markup(), - 'post_title' => $metadata['post_title'] ?? '', - 'post_date_gmt' => $metadata['post_date_gmt'] ?? '', - 'menu_order' => $metadata['menu_order'] ?? '', - 'meta_input' => $metadata, - )); - if(is_wp_error($updated)) { - throw new Exception('Failed to update post'); - } - } - - // Delete posts that were not updated (i.e. files were deleted) - $posts_to_delete = get_posts([ - 'post_type' => $post_type, - 'post_status' => 'publish', - 'posts_per_page' => -1, - 'post__not_in' => $updated_ids, - 'fields' => 'ids' - ]); - - foreach($posts_to_delete as $post_id) { - wp_delete_post($post_id, true); - } - } +// rather update the existing page. +if ( '/git-receive-pack' === $request_path ) { + // throw new Exception("test"); + foreach ( $synced_post_types as $post_type ) { + $updated_ids = array(); + foreach ( $git_fs->ls( $post_type ) as $file_name ) { + $file_path = $post_type . '/' . 
$file_name; + $converter = new AnnotatedBlockMarkupConsumer( + $git_fs->get_contents( $file_path ) + ); + $result = $converter->consume(); + + $existing_posts = get_posts( + array( + 'post_type' => $post_type, + 'meta_key' => 'local_file_path', + 'meta_value' => $file_path, + ) + ); + + $filename_without_extension = pathinfo( $file_name, PATHINFO_FILENAME ); + + if ( $existing_posts ) { + $post_id = $existing_posts[0]->ID; + } else { + $post_id = wp_insert_post( + array( + 'post_type' => $post_type, + 'post_status' => 'publish', + 'post_title' => $filename_without_extension, + 'meta_input' => array( + 'local_file_path' => $file_path, + ), + ) + ); + } + $updated_ids[] = $post_id; + + $metadata = $result->get_all_metadata( array( 'first_value_only' => true ) ); + $updated = wp_update_post( + array( + 'ID' => $post_id, + 'post_name' => $filename_without_extension, + 'post_content' => $result->get_block_markup(), + 'post_title' => $metadata['post_title'] ?? '', + 'post_date_gmt' => $metadata['post_date_gmt'] ?? '', + 'menu_order' => $metadata['menu_order'] ?? '', + 'meta_input' => $metadata, + ) + ); + if ( is_wp_error( $updated ) ) { + throw new Exception( 'Failed to update post' ); + } + } + + // Delete posts that were not updated (i.e. 
files were deleted) + $posts_to_delete = get_posts( + array( + 'post_type' => $post_type, + 'post_status' => 'publish', + 'posts_per_page' => -1, + 'post__not_in' => $updated_ids, + 'fields' => 'ids', + ) + ); + + foreach ( $posts_to_delete as $post_id ) { + wp_delete_post( $post_id, true ); + } + } } diff --git a/plugins/static-files-editor/plugin.php b/plugins/static-files-editor/plugin.php index fc954dba0..f18262d63 100644 --- a/plugins/static-files-editor/plugin.php +++ b/plugins/static-files-editor/plugin.php @@ -120,11 +120,14 @@ public static function get_data_source() { self::$data_source = GitDataSource::create( $settings ); break; case 'github_repository': - $settings['gitRepo'] = self::get_git_remote_url( $settings['gitRepo'], [ - 'provider' => 'github', - 'token' => get_option( 'msf_github_token', '' ), - ] ); - self::$data_source = GitDataSource::create( $settings ); + $settings['gitRepo'] = self::get_git_remote_url( + $settings['gitRepo'], + array( + 'provider' => 'github', + 'token' => get_option( 'msf_github_token', '' ), + ) + ); + self::$data_source = GitDataSource::create( $settings ); break; } @@ -164,13 +167,13 @@ public static function menu_item_callback() { try { self::sync_data_source(); $data_source = self::get_data_source(); - $fs = $data_source->get_filesystem(); - foreach( $fs->ls('/') as $entry ) { - if( ! $fs->is_file( $entry ) ) { + $fs = $data_source->get_filesystem(); + foreach ( $fs->ls( '/' ) as $entry ) { + if ( ! $fs->is_file( $entry ) ) { continue; } $extension = pathinfo( $entry, PATHINFO_EXTENSION ); - if( ! in_array( $extension, ['md', 'html'] ) ) { + if ( ! 
in_array( $extension, array( 'md', 'html' ) ) ) { continue; } self::get_or_create_post_for_file( $entry ); @@ -199,13 +202,13 @@ public static function menu_item_callback() { $post_id = null; foreach ( $posts as $post ) { $path = get_post_meta( $post->ID, 'local_file_path', true ); - if ( $path !== '/my-first-note.md' ) { + if ( '/my-first-note.md' !== $path ) { $post_id = $post->ID; break; } } // Fallback to first post if no other found - if ( $post_id === null ) { + if ( null === $post_id ) { $post_id = $posts[0]->ID; } @@ -240,7 +243,7 @@ function () { self::register_post_type(); // Redirect menu page to custom route global $pagenow; - if ( $pagenow === 'admin.php' && isset( $_GET['page'] ) && $_GET['page'] === 'static_files_editor' ) { + if ( 'admin.php' === $pagenow && isset( $_GET['page'] ) && 'static_files_editor' === $_GET['page'] ) { self::menu_item_callback(); } } @@ -423,7 +426,7 @@ function ( $hook ) { ); $screen = get_current_screen(); - $enqueue_script = $screen && $screen->base === 'post' && $screen->post_type === WP_LOCAL_FILE_POST_TYPE; + $enqueue_script = $screen && 'post' === $screen->base && WP_LOCAL_FILE_POST_TYPE === $screen->post_type; if ( ! $enqueue_script ) { return; } @@ -661,7 +664,7 @@ function () { add_filter( 'block_editor_settings_all', function ( $settings, $context ) { - if ( isset( $context->post ) && $context->post->post_type === WP_LOCAL_FILE_POST_TYPE ) { + if ( isset( $context->post ) && WP_LOCAL_FILE_POST_TYPE === $context->post->post_type ) { $settings['autosaveInterval'] = 86400 * 100; } return $settings; @@ -676,7 +679,7 @@ function ( $settings, $context ) { function ( $response, $post, $request ) { // Short-circuit on non-GET requests to avoid messing with // POST requests. 
- if ( $request->get_method() !== 'GET' ) { + if ( 'GET' !== $request->get_method() ) { return $response; } @@ -710,14 +713,14 @@ function ( $processed_post, $unprocessed_post, $unsanitized_postarr, $update ) u } $creating_revision = false; - if ( $processed_post['post_type'] === 'revision' ) { + if ( 'revision' === $processed_post['post_type'] ) { $parent_post = get_post( $processed_post['post_parent'] ); - if ( $parent_post->post_type === WP_LOCAL_FILE_POST_TYPE ) { + if ( WP_LOCAL_FILE_POST_TYPE === $parent_post->post_type ) { $creating_revision = true; } } - $updating_post = $processed_post['post_type'] === WP_LOCAL_FILE_POST_TYPE && $update; + $updating_post = WP_LOCAL_FILE_POST_TYPE === $processed_post['post_type'] && $update; $should_run = $updating_post || $creating_revision; if ( ! $should_run ) { return $processed_post; @@ -814,7 +817,7 @@ function ( $processed_post, $unprocessed_post, $unsanitized_postarr, $update ) u } else { $blocks_with_metadata = self::annotated_block_markup_to_blocks_with_metadata( $merge_result->get_merged_content() ); $delta_post = array_merge( - ['post_content' => $blocks_with_metadata->get_block_markup()], + array( 'post_content' => $blocks_with_metadata->get_block_markup() ), $blocks_with_metadata->get_all_metadata( array( 'first_value_only' => true ) ), ); /** @@ -883,7 +886,7 @@ public static function resize_to_max_dimensions_if_files_is_an_image( $image_pat // getimagesize() returns false for non-images (and // also image formats it can't handle) $image_size = @getimagesize( $image_path ); - if ( $image_size === false ) { + if ( false === $image_size ) { return $image_path; } @@ -1086,7 +1089,7 @@ private static function parse_local_file( $content, $format ) { } private static function post_to_mergable_string( $post, $format ) { - if ( $format === 'html' ) { + if ( 'html' === $format ) { return trim( $post['post_content'], "\n " ); } $blocks_with_metadata = self::post_entity_to_blocks_with_metadata( $post ); @@ -1185,7 
+1188,7 @@ private static function wordpressify_static_assets_urls( $content ) { // @TODO: Also work with tags, account // for .md and directory links etc. - if ( $p->get_tag() !== 'IMG' ) { + if ( 'IMG' !== $p->get_tag() ) { continue; } @@ -1251,23 +1254,23 @@ public static function get_local_files_list( $subdirectory = '' ) { private static function build_local_file_list( $fs, $dir, &$list, $path_to_post ) { $items = $fs->ls( $dir ); - if ( $items === false ) { + if ( false === $items ) { return; } foreach ( $items as $item ) { // Exclude the autosaves directory from the files tree - if ( $dir === '/' && $item === WP_AUTOSAVES_DIRECTORY ) { + if ( '/' === $dir && WP_AUTOSAVES_DIRECTORY === $item ) { continue; } // Exclude the .gitkeep file from the files tree. // WP_Git_Filesystem::mkdir() creates an empty .gitkeep file in each created // directory since Git doesn't support empty directories. - if ( $item === '.gitkeep' ) { + if ( '.gitkeep' === $item ) { continue; } - $path = $dir === '/' ? "/$item" : "$dir/$item"; + $path = '/' === $dir ? "/$item" : "$dir/$item"; if ( $fs->is_dir( $path ) ) { $node = array( @@ -1550,7 +1553,7 @@ public static function update_file_endpoint( $request ) { // Regenerate the content from scratch if we're changing the file format. 
$previous_extension = pathinfo( $from_path, PATHINFO_EXTENSION ); $new_extension = pathinfo( $to_path, PATHINFO_EXTENSION ); - if ( $existing_post->post_type === WP_LOCAL_FILE_POST_TYPE && $previous_extension !== $new_extension ) { + if ( WP_LOCAL_FILE_POST_TYPE === $existing_post->post_type && $previous_extension !== $new_extension ) { $parsed = self::parse_local_file( $previous_content, $previous_extension @@ -1682,7 +1685,7 @@ public static function create_files_batch_endpoint( $request ) { continue; } $paths = $event->files; - if ( $visitor->get_current_depth() === 1 ) { + if ( 1 === $visitor->get_current_depth() ) { // Make sure we save the top-level directories $paths = array_merge( array( $event->dir ), $event->files ); } @@ -1690,7 +1693,7 @@ public static function create_files_batch_endpoint( $request ) { $type = $uploaded_fs->is_dir( $path ) ? 'directory' : 'file'; $post_id = null; $created_path = wp_join_unix_paths( $create_in_dir, $path ); - if ( $type === 'post' ) { + if ( 'post' === $type ) { $created_post = get_posts( array( 'post_type' => WP_LOCAL_FILE_POST_TYPE, @@ -1751,7 +1754,7 @@ public static function get_git_branches( $git_repo_url ) { public static function get_git_branches_endpoint( $request ) { $git_repo_string = $request->get_param( 'gitRepo' ); $provider = $request->get_param( 'provider' ); - $git_repo_url = self::get_git_remote_url( $git_repo_string, [ 'provider' => $provider ] ); + $git_repo_url = self::get_git_remote_url( $git_repo_string, array( 'provider' => $provider ) ); return self::get_git_branches( $git_repo_url ); } @@ -1760,12 +1763,12 @@ public static function get_git_files_endpoint( $request ) { $git_repo_url = $request->get_param( 'gitRepo' ); $provider = $request->get_param( 'provider' ); - $repo->add_remote( 'origin', self::get_git_remote_url( $git_repo_url, [ 'provider' => $provider ] ) ); + $repo->add_remote( 'origin', self::get_git_remote_url( $git_repo_url, array( 'provider' => $provider ) ) ); $remote = new 
GitRemote( $repo, 'origin' ); $refs = $remote->ls_refs( 'refs/heads/' ); - $branch = $request->get_param( 'branch' ); + $branch = $request->get_param( 'branch' ); if ( ! isset( $refs[ $branch ] ) ) { return new WP_Error( 'branch_not_found', 'Branch "' . $branch . '" not found' ); } @@ -1780,9 +1783,9 @@ public static function get_git_files_endpoint( $request ) { public static function get_git_remote_url( $git_repo_url, $options = array() ) { switch ( $options['provider'] ) { case 'github': - $url = WPURL::parse( $git_repo_url ); + $url = WPURL::parse( $git_repo_url ); $url->username = get_option( 'msf_github_token', '' ); - $url = $url->toString(); + $url = $url->toString(); break; case 'git': default: @@ -1923,7 +1926,7 @@ public static function get_settings() { $user = wp_get_current_user(); $uploads_dir = wp_upload_dir(); - $settings = get_option( 'static_files_editor_settings' ) ?: array(); + $settings = get_option( 'static_files_editor_settings' ) ? get_option( 'static_files_editor_settings' ) : array(); $settings = array_merge( array( 'gitRepo' => '', @@ -1944,11 +1947,11 @@ public static function get_settings() { */ public static function get_github_repos_endpoint() { $github_token = get_option( 'msf_github_token', '' ); - + if ( empty( $github_token ) ) { return new WP_Error( 'no_token', 'GitHub token not found', array( 'status' => 400 ) ); } - + $response = wp_remote_get( 'https://api.github.com/user/repos?visibility=all&sort=updated&per_page=100', array( @@ -1959,42 +1962,42 @@ public static function get_github_repos_endpoint() { ), ) ); - + if ( is_wp_error( $response ) ) { return new WP_Error( 'github_api_error', $response->get_error_message(), array( 'status' => 500 ) ); } - - $body = wp_remote_retrieve_body( $response ); + + $body = wp_remote_retrieve_body( $response ); $repos = json_decode( $body, true ); - + if ( ! 
is_array( $repos ) ) { return new WP_Error( 'invalid_response', 'Invalid response from GitHub API', array( 'status' => 500 ) ); } - foreach($repos as $key => $repo) { + foreach ( $repos as $key => $repo ) { $git_url = $repo['git_url']; - if(str_starts_with($git_url, 'git://')) { - $git_url = 'https' . substr($git_url, 3); + if ( str_starts_with( $git_url, 'git://' ) ) { + $git_url = 'https' . substr( $git_url, 3 ); } - $repos[$key]['http_clone_url'] = $git_url; + $repos[ $key ]['http_clone_url'] = $git_url; } - + return $repos; } - + /** * Store GitHub token endpoint */ public static function store_github_token_endpoint( $request ) { $token = $request->get_param( 'token' ); - + if ( empty( $token ) ) { return new WP_Error( 'no_token', 'No token provided', array( 'status' => 400 ) ); } - + // Store the token in site options update_option( 'msf_github_token', $token ); - + return array( 'success' => true ); } @@ -2002,10 +2005,9 @@ public static function store_github_token_endpoint( $request ) { public static function clear_github_token_endpoint() { // Delete the token from site options delete_option( 'msf_github_token' ); - + return array( 'success' => true ); } - } -WP_Static_Files_Editor_Plugin::initialize(); \ No newline at end of file +WP_Static_Files_Editor_Plugin::initialize(); diff --git a/plugins/url-updater/update_plugin.php b/plugins/url-updater/update_plugin.php index b7deb4775..7ab1750cb 100644 --- a/plugins/url-updater/update_plugin.php +++ b/plugins/url-updater/update_plugin.php @@ -39,7 +39,7 @@ private function unzipPackage() { $this->wp_plugins_directory->mkdir( $this->new_version_extract_to_dir ); $extension = pathinfo( $this->package_absolute_path, PATHINFO_EXTENSION ); - if ( $extension === 'zip' ) { + if ( 'zip' === $extension ) { $zip_fs = ZipFilesystem::create( FileReadStream::from_path( $this->package_absolute_path ) ); copy_between_filesystems( array( @@ -55,10 +55,10 @@ private function unzipPackage() { $extracted_dirs, function ( $dir ) { 
$basename = basename( $dir ); - return $basename !== '__MACOSX' && $basename !== '.DS_Store'; + return '__MACOSX' !== $basename && '.DS_Store' !== $basename; } ); - if ( count( $extracted_dirs ) === 1 ) { + if ( 1 === count( $extracted_dirs ) ) { $potential_root_dir = wp_join_unix_paths( $this->new_version_extract_to_dir, $extracted_dirs[0] ); if ( $this->wp_plugins_directory->is_dir( $potential_root_dir ) ) { return $potential_root_dir; @@ -66,7 +66,7 @@ function ( $dir ) { } return $this->new_version_extract_to_dir; - } elseif ( $extension === 'php' ) { + } elseif ( 'php' === $extension ) { $plugin_name = basename( $this->package_absolute_path, '.php' ); $this->wp_plugins_directory->mkdir( $plugin_name ); $this->wp_plugins_directory->put_contents( diff --git a/plugins/url-updater/url-updater.php b/plugins/url-updater/url-updater.php index ff40707d9..2aa92279d 100644 --- a/plugins/url-updater/url-updater.php +++ b/plugins/url-updater/url-updater.php @@ -47,7 +47,7 @@ function rpi_render_admin_page() { $stored_url = rpi_get_stored_plugin_url( $plugin_file ); // Direct update without changing URL - if ( isset( $_GET['direct_update'] ) && $_GET['direct_update'] === 'true' ) { + if ( isset( $_GET['direct_update'] ) && 'true' === $_GET['direct_update'] ) { $installed_plugin_file = rpi_install_plugin_from_url( $stored_url, true, $plugin_file ); if ( is_wp_error( $installed_plugin_file ) ) { echo '

' . esc_html( $installed_plugin_file->get_error_message() ) . '

'; @@ -149,10 +149,10 @@ function rpi_install_plugin_from_url( string $package_url, bool $is_update = fal return new WP_Error( 'download_failed', $tmp_file->get_error_message() ); } - $parsed_url = wp_parse_url( $package_url ); - $package_path = isset( $parsed_url['path'] ) ? $parsed_url['path'] : ''; + $parsed_url = wp_parse_url( $package_url ); + $package_path = isset( $parsed_url['path'] ) ? $parsed_url['path'] : ''; $package_extension = 'zip'; - $base_name = $original_plugin_file ? basename( dirname( $original_plugin_file ) ) : basename( $package_path, '.' . $package_extension ); + $base_name = $original_plugin_file ? basename( dirname( $original_plugin_file ) ) : basename( $package_path, '.' . $package_extension ); /** * $tmp_file has a random component in the filename. WordPress would