Update to Drupal 8.2.5. For more information, see https://www.drupal.org/project/drupal/releases/8.2.5

Pantheon Automation 2017-01-04 16:50:53 -08:00 committed by Greg Anderson
parent 8544b60b39
commit db56c09587
86 changed files with 2413 additions and 488 deletions

View file

@@ -108,8 +108,13 @@ class Download extends ProcessPluginBase implements ContainerFactoryPluginInterf
// Stream the request body directly to the final destination stream.
$this->configuration['guzzle_options']['sink'] = $destination_stream;
// Make the request. Guzzle throws an exception for anything other than 200.
$this->httpClient->get($source, $this->configuration['guzzle_options']);
try {
// Make the request. Guzzle throws an exception for anything but 200.
$this->httpClient->get($source, $this->configuration['guzzle_options']);
}
catch (\Exception $e) {
throw new MigrateException("{$e->getMessage()} ($source)");
}
return $final_destination;
}
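
The practical effect of this change is that any Guzzle failure (a 404, a DNS error, an unreadable stream) surfaces as a MigrateException that names the offending source URL, so the executable can log it against the row and move on to the next one (see DownloadFunctionalTest below). A minimal standalone sketch of the pattern, using a hypothetical helper function rather than the plugin itself:

<?php

use Drupal\migrate\MigrateException;
use GuzzleHttp\Client;

/**
 * Streams $source into $destination_stream, wrapping failures for migrate.
 *
 * Illustrative helper only; the Download plugin does this in transform().
 */
function example_fetch_to_sink($source, $destination_stream) {
  $client = new Client();
  try {
    // Guzzle throws for transport errors and, by default, for any non-2xx
    // response, so every failure funnels through the catch block below.
    $client->get($source, ['sink' => $destination_stream]);
  }
  catch (\Exception $e) {
    // Re-throw with the source URL appended so the logged message is useful.
    throw new MigrateException("{$e->getMessage()} ($source)");
  }
}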

View file

@@ -112,20 +112,17 @@ class FileCopy extends ProcessPluginBase implements ContainerFactoryPluginInterf
return $destination;
}
$replace = $this->getOverwriteMode();
// We attempt the copy/move first to avoid calling file_prepare_directory()
// any more than absolutely necessary.
$final_destination = $this->writeFile($source, $destination, $replace);
if ($final_destination) {
return $final_destination;
}
// If writeFile didn't work, make sure there's a writable directory in
// place.
// Check if a writable directory exists, and if not try to create it.
$dir = $this->getDirectory($destination);
if (!file_prepare_directory($dir, FILE_CREATE_DIRECTORY | FILE_MODIFY_PERMISSIONS)) {
throw new MigrateException("Could not create or write to directory '$dir'");
// If the directory exists and is writable, avoid the
// file_prepare_directory() call and write the file to the destination.
if (!is_dir($dir) || !is_writable($dir)) {
if (!file_prepare_directory($dir, FILE_CREATE_DIRECTORY | FILE_MODIFY_PERMISSIONS)) {
throw new MigrateException("Could not create or write to directory '$dir'");
}
}
$final_destination = $this->writeFile($source, $destination, $replace);
$final_destination = $this->writeFile($source, $destination, $this->getOverwriteMode());
if ($final_destination) {
return $final_destination;
}
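
The reworked flow computes the destination directory first, calls file_prepare_directory() only when that directory is missing or not writable, and then performs a single writeFile() call using the configured overwrite mode. A minimal sketch of the guard, assuming Drupal 8's procedural file API (file_prepare_directory() and the FILE_* flags) is loaded:

<?php

use Drupal\migrate\MigrateException;

/**
 * Ensures $dir exists and is writable before a file is written into it.
 *
 * Illustrative helper only; FileCopy does this inline in transform().
 */
function example_prepare_destination_directory($dir) {
  // Cheap checks first: skip file_prepare_directory() when the directory is
  // already present and writable.
  if (!is_dir($dir) || !is_writable($dir)) {
    if (!file_prepare_directory($dir, FILE_CREATE_DIRECTORY | FILE_MODIFY_PERMISSIONS)) {
      throw new MigrateException("Could not create or write to directory '$dir'");
    }
  }
}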

View file

@@ -22,15 +22,17 @@ class Iterator extends ProcessPluginBase {
* Runs a process pipeline on each destination property per list item.
*/
public function transform($value, MigrateExecutableInterface $migrate_executable, Row $row, $destination_property) {
$return = array();
foreach ($value as $key => $new_value) {
$new_row = new Row($new_value, array());
$migrate_executable->processRow($new_row, $this->configuration['process']);
$destination = $new_row->getDestination();
if (array_key_exists('key', $this->configuration)) {
$key = $this->transformKey($key, $migrate_executable, $new_row);
$return = [];
if (!is_null($value)) {
foreach ($value as $key => $new_value) {
$new_row = new Row($new_value, []);
$migrate_executable->processRow($new_row, $this->configuration['process']);
$destination = $new_row->getDestination();
if (array_key_exists('key', $this->configuration)) {
$key = $this->transformKey($key, $migrate_executable, $new_row);
}
$return[$key] = $destination;
}
$return[$key] = $destination;
}
return $return;
}
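
With the is_null() guard in place, a missing (NULL) source value now yields an empty array instead of a PHP warning from foreach. A plain-PHP sketch of the same guard, outside the plugin API:

<?php

/**
 * Applies $process_item to every element of $value, tolerating NULL input.
 *
 * Illustrative stand-in for Iterator::transform(), not the plugin itself.
 */
function example_transform_list($value, callable $process_item) {
  $return = [];
  if (!is_null($value)) {
    foreach ($value as $key => $new_value) {
      $return[$key] = $process_item($new_value);
    }
  }
  return $return;
}

// example_transform_list(NULL, 'strtoupper') returns [].
// example_transform_list(['a' => 'x'], 'strtoupper') returns ['a' => 'X'].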

View file

@@ -310,13 +310,13 @@ abstract class SourcePluginBase extends PluginBase implements MigrateSourceInter
while (!isset($this->currentRow) && $this->getIterator()->valid()) {
$row_data = $this->getIterator()->current() + $this->configuration;
$this->getIterator()->next();
$this->fetchNextRow();
$row = new Row($row_data, $this->migration->getSourcePlugin()->getIds(), $this->migration->getDestinationIds());
// Populate the source key for this row.
$this->currentSourceIds = $row->getSourceIdValues();
// Pick up the existing map row, if any, unless getNextRow() did it.
// Pick up the existing map row, if any, unless fetchNextRow() did it.
if (!$this->mapRowAdded && ($id_map = $this->idMap->getRowBySource($this->currentSourceIds))) {
$row->setIdMap($id_map);
}
@@ -348,7 +348,14 @@ abstract class SourcePluginBase extends PluginBase implements MigrateSourceInter
}
/**
* Checks if the incoming data is newer than what we've previously imported.
* Positions the iterator to the next row.
*/
protected function fetchNextRow() {
$this->getIterator()->next();
}
/**
* Check if the incoming data is newer than what we've previously imported.
*
* @param \Drupal\migrate\Row $row
* The row we're importing.
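
Extracting the iterator advance into fetchNextRow() gives subclasses a single overridable step for "move to the next row": the base implementation just calls next(), while SqlBase (below) detects an exhausted iterator and pulls in another batch. A plain-PHP sketch of that idea, independent of the plugin API (class and method names are illustrative):

<?php

/**
 * Iterates over rows that arrive in chunks, refilling as each chunk runs out.
 */
class ExampleChunkedSource {

  /** @var array[] Remaining chunks of rows. */
  protected $chunks;

  /** @var \ArrayIterator Iterator over the current chunk. */
  protected $iterator;

  public function __construct(array $chunks) {
    $this->chunks = $chunks;
    $this->iterator = new \ArrayIterator(array_shift($this->chunks) ?: []);
  }

  public function current() {
    return $this->iterator->valid() ? $this->iterator->current() : NULL;
  }

  public function fetchNextRow() {
    $this->iterator->next();
    // Out of rows in this chunk but more chunks remain: load the next one.
    if (!$this->iterator->valid() && $this->chunks) {
      $this->iterator = new \ArrayIterator(array_shift($this->chunks));
    }
  }

}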

View file

@@ -5,6 +5,7 @@ namespace Drupal\migrate\Plugin\migrate\source;
use Drupal\Core\Database\Database;
use Drupal\Core\Plugin\ContainerFactoryPluginInterface;
use Drupal\Core\State\StateInterface;
use Drupal\migrate\MigrateException;
use Drupal\migrate\Plugin\MigrationInterface;
use Drupal\migrate\Plugin\migrate\id_map\Sql;
use Drupal\migrate\Plugin\MigrateIdMapInterface;
@@ -42,6 +43,22 @@ abstract class SqlBase extends SourcePluginBase implements ContainerFactoryPlugi
*/
protected $state;
/**
* The number of batches already run.
*
* @var int
*/
protected $batch = 0;
/**
* Number of records to fetch from the database during each batch.
*
* A value of zero indicates no batching is to be done.
*
* @var int
*/
protected $batchSize = 0;
/**
* {@inheritdoc}
*/
@@ -160,70 +177,110 @@ abstract class SqlBase extends SourcePluginBase implements ContainerFactoryPlugi
* we will take advantage of the PDO-based API to optimize the query up-front.
*/
protected function initializeIterator() {
$this->prepareQuery();
// Initialize the batch size.
if ($this->batchSize == 0 && isset($this->configuration['batch_size'])) {
// Valid batch sizes are integers >= 0.
if (is_int($this->configuration['batch_size']) && ($this->configuration['batch_size']) >= 0) {
$this->batchSize = $this->configuration['batch_size'];
}
else {
throw new MigrateException("batch_size must be greater than or equal to zero");
}
}
// Get the key values, for potential use in joining to the map table.
$keys = array();
// If a batch has run, the query is already set up.
if ($this->batch == 0) {
$this->prepareQuery();
// The rules for determining what conditions to add to the query are as
// follows (applying first applicable rule):
// 1. If the map is joinable, join it. We will want to accept all rows
// which are either not in the map, or marked in the map as NEEDS_UPDATE.
// Note that if high water fields are in play, we want to accept all rows
// above the high water mark in addition to those selected by the map
// conditions, so we need to OR them together (but AND with any existing
// conditions in the query). So, ultimately the SQL condition will look
// like (original conditions) AND (map IS NULL OR map needs update
// OR above high water).
$conditions = $this->query->orConditionGroup();
$condition_added = FALSE;
if (empty($this->configuration['ignore_map']) && $this->mapJoinable()) {
// Build the join to the map table. Because the source key could have
// multiple fields, we need to build things up.
$count = 1;
$map_join = '';
$delimiter = '';
foreach ($this->getIds() as $field_name => $field_schema) {
if (isset($field_schema['alias'])) {
$field_name = $field_schema['alias'] . '.' . $this->query->escapeField($field_name);
// Get the key values, for potential use in joining to the map table.
$keys = array();
// The rules for determining what conditions to add to the query are as
// follows (applying first applicable rule):
// 1. If the map is joinable, join it. We will want to accept all rows
// which are either not in the map, or marked in the map as NEEDS_UPDATE.
// Note that if high water fields are in play, we want to accept all rows
// above the high water mark in addition to those selected by the map
// conditions, so we need to OR them together (but AND with any existing
// conditions in the query). So, ultimately the SQL condition will look
// like (original conditions) AND (map IS NULL OR map needs update
// OR above high water).
$conditions = $this->query->orConditionGroup();
$condition_added = FALSE;
if (empty($this->configuration['ignore_map']) && $this->mapJoinable()) {
// Build the join to the map table. Because the source key could have
// multiple fields, we need to build things up.
$count = 1;
$map_join = '';
$delimiter = '';
foreach ($this->getIds() as $field_name => $field_schema) {
if (isset($field_schema['alias'])) {
$field_name = $field_schema['alias'] . '.' . $this->query->escapeField($field_name);
}
$map_join .= "$delimiter$field_name = map.sourceid" . $count++;
$delimiter = ' AND ';
}
$map_join .= "$delimiter$field_name = map.sourceid" . $count++;
$delimiter = ' AND ';
}
$alias = $this->query->leftJoin($this->migration->getIdMap()->getQualifiedMapTableName(), 'map', $map_join);
$conditions->isNull($alias . '.sourceid1');
$conditions->condition($alias . '.source_row_status', MigrateIdMapInterface::STATUS_NEEDS_UPDATE);
$condition_added = TRUE;
$alias = $this->query->leftJoin($this->migration->getIdMap()
->getQualifiedMapTableName(), 'map', $map_join);
$conditions->isNull($alias . '.sourceid1');
$conditions->condition($alias . '.source_row_status', MigrateIdMapInterface::STATUS_NEEDS_UPDATE);
$condition_added = TRUE;
// And as long as we have the map table, add its data to the row.
$n = count($this->getIds());
for ($count = 1; $count <= $n; $count++) {
$map_key = 'sourceid' . $count;
$this->query->addField($alias, $map_key, "migrate_map_$map_key");
}
if ($n = count($this->migration->getDestinationIds())) {
// And as long as we have the map table, add its data to the row.
$n = count($this->getIds());
for ($count = 1; $count <= $n; $count++) {
$map_key = 'destid' . $count++;
$map_key = 'sourceid' . $count;
$this->query->addField($alias, $map_key, "migrate_map_$map_key");
}
if ($n = count($this->migration->getDestinationIds())) {
for ($count = 1; $count <= $n; $count++) {
$map_key = 'destid' . $count++;
$this->query->addField($alias, $map_key, "migrate_map_$map_key");
}
}
$this->query->addField($alias, 'source_row_status', 'migrate_map_source_row_status');
}
// 2. If we are using high water marks, also include rows above the mark.
// But, include all rows if the high water mark is not set.
if ($this->getHighWaterProperty() && ($high_water = $this->getHighWater()) !== '') {
$high_water_field = $this->getHighWaterField();
$conditions->condition($high_water_field, $high_water, '>');
$this->query->orderBy($high_water_field);
}
if ($condition_added) {
$this->query->condition($conditions);
}
$this->query->addField($alias, 'source_row_status', 'migrate_map_source_row_status');
}
// 2. If we are using high water marks, also include rows above the mark.
// But, include all rows if the high water mark is not set.
if ($this->getHighWaterProperty() && ($high_water = $this->getHighWater()) !== '') {
$high_water_field = $this->getHighWaterField();
$conditions->condition($high_water_field, $high_water, '>');
$this->query->orderBy($high_water_field);
}
if ($condition_added) {
$this->query->condition($conditions);
}
// Download data in batches for performance.
if (($this->batchSize > 0)) {
$this->query->range($this->batch * $this->batchSize, $this->batchSize);
}
return new \IteratorIterator($this->query->execute());
}
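
The long comment above describes the final WHERE clause in words. Spelled out, with the map table joined under the alias map and a hypothetical high-water field named changed, the generated condition comes down to roughly this shape (an illustrative string, not literal query-builder output):

<?php

use Drupal\migrate\Plugin\MigrateIdMapInterface;

// Rough shape of the condition: accept rows not yet in the map, rows marked
// as needing an update, and rows above the high-water mark, AND-ed with
// whatever conditions the source query already had.
$where = '(original conditions) AND ('
  . 'map.sourceid1 IS NULL'
  . ' OR map.source_row_status = ' . MigrateIdMapInterface::STATUS_NEEDS_UPDATE
  . ' OR changed > :high_water'
  . ')';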
/**
* Positions the iterator to the next row.
*/
protected function fetchNextRow() {
$this->getIterator()->next();
// We might be out of data entirely, or just out of data in the current
// batch. Attempt to fetch the next batch and see.
if ($this->batchSize > 0 && !$this->getIterator()->valid()) {
$this->fetchNextBatch();
}
}
/**
* Prepares query for the next set of data from the source database.
*/
protected function fetchNextBatch() {
$this->batch++;
unset($this->iterator);
$this->getIterator()->rewind();
}
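
Together, batch, batchSize, fetchNextRow() and fetchNextBatch() page through the source query: initializeIterator() applies range($batch * $batchSize, $batchSize), and when the current result set runs dry the batch counter is bumped, the stale iterator is discarded, and getIterator() rebuilds it, re-running initializeIterator() with the new offset. A tiny sketch of the offset arithmetic, assuming a batch size of 20:

<?php

// Offsets produced for successive batches with batch_size = 20. These are the
// values SqlBase hands to the query's range() call.
$batch_size = 20;
foreach ([0, 1, 2] as $batch) {
  $offset = $batch * $batch_size;
  // Batch 0 selects rows 0-19, batch 1 rows 20-39, batch 2 rows 40-59.
  printf("batch %d: range(%d, %d)\n", $batch, $offset, $batch_size);
}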
/**
* @return \Drupal\Core\Database\Query\SelectInterface
*/
@@ -249,6 +306,14 @@ abstract class SqlBase extends SourcePluginBase implements ContainerFactoryPlugi
if (!$this->getIds()) {
return FALSE;
}
// With batching, we want a later batch to return the same rows that would
// have been returned at the same point within a monolithic query. If we
// join to the map table, the first batch is writing to the map table and
// thus affecting the results of subsequent batches. To be safe, we avoid
// joining to the map table when batching.
if ($this->batchSize > 0) {
return FALSE;
}
$id_map = $this->migration->getIdMap();
if (!$id_map instanceof Sql) {
return FALSE;

View file

@@ -0,0 +1,7 @@
type: module
name: Migrate query batch Source test
description: 'Provides a database table and records for SQL import with batch testing.'
package: Testing
core: 8.x
dependencies:
- migrate

View file

@@ -0,0 +1,45 @@
<?php
namespace Drupal\migrate_query_batch_test\Plugin\migrate\source;
use Drupal\migrate\Plugin\migrate\source\SqlBase;
/**
* Source plugin for the query batch tests.
*
* @MigrateSource(
* id = "query_batch_test"
* )
*/
class QueryBatchTest extends SqlBase {
/**
* {@inheritdoc}
*/
public function query() {
return ($this->select('query_batch_test', 'q')->fields('q'));
}
/**
* {@inheritdoc}
*/
public function fields() {
$fields = [
'id' => $this->t('Id'),
'data' => $this->t('data'),
];
return $fields;
}
/**
* {@inheritdoc}
*/
public function getIds() {
return [
'id' => [
'type' => 'integer',
],
];
}
}
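
To use batching, a SqlBase-derived source such as this test plugin only needs a batch_size key in its source configuration; zero (the default) keeps the single monolithic query, and as noted earlier a non-zero batch size also disables the map-table join. A hedged example of a stub migration definition in the same PHP-array form the tests use; the 'null' destination and the trivial process mapping are assumptions chosen to keep the sketch short:

<?php

$definition = [
  'source' => [
    'plugin' => 'query_batch_test',
    // Fetch 100 source rows per query instead of everything at once.
    'batch_size' => 100,
  ],
  'process' => [
    'id' => 'id',
  ],
  'destination' => [
    'plugin' => 'null',
  ],
];
$migration = \Drupal::service('plugin.manager.migration')
  ->createStubMigration($definition);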

View file

@@ -0,0 +1,81 @@
<?php
namespace Drupal\Tests\migrate\Functional\process;
use Drupal\migrate\MigrateExecutable;
use Drupal\migrate\MigrateMessage;
use Drupal\migrate\Plugin\MigrateIdMapInterface;
use Drupal\migrate\Plugin\MigrationInterface;
use Drupal\Tests\BrowserTestBase;
/**
* Tests the 'download' process plugin.
*
* @group migrate
*/
class DownloadFunctionalTest extends BrowserTestBase {
/**
* {@inheritdoc}
*/
public static $modules = ['migrate', 'file'];
/**
* Tests that an exception is thrown but the migration continues with the next row.
*/
public function testExceptionThrow() {
$invalid_url = "{$this->baseUrl}/not-existent-404";
$valid_url = "{$this->baseUrl}/core/misc/favicon.ico";
$definition = [
'source' => [
'plugin' => 'embedded_data',
'data_rows' => [
['url' => $invalid_url, 'uri' => 'public://first.txt'],
['url' => $valid_url, 'uri' => 'public://second.ico'],
],
'ids' => [
'url' => ['type' => 'string'],
],
],
'process' => [
'uri' => [
'plugin' => 'download',
'source' => ['url', 'uri'],
]
],
'destination' => [
'plugin' => 'entity:file',
],
];
$migration = \Drupal::service('plugin.manager.migration')->createStubMigration($definition);
$executable = new MigrateExecutable($migration, new MigrateMessage());
$result = $executable->import();
// Check that the migration has completed.
$this->assertEquals($result, MigrationInterface::RESULT_COMPLETED);
/** @var \Drupal\migrate\Plugin\MigrateIdMapInterface $id_map_plugin */
$id_map_plugin = $migration->getIdMap();
// Check that the first row was marked as failed in the id map table.
$map_row = $id_map_plugin->getRowBySource(['url' => $invalid_url]);
$this->assertEquals(MigrateIdMapInterface::STATUS_FAILED, $map_row['source_row_status']);
$this->assertNull($map_row['destid1']);
// Check that a message with the thrown exception has been logged.
$messages = $id_map_plugin->getMessageIterator(['url' => $invalid_url])->fetchAll();
$this->assertCount(1, $messages);
$message = reset($messages);
$this->assertEquals("Cannot read from non-readable stream ($invalid_url)", $message->message);
$this->assertEquals(MigrationInterface::MESSAGE_ERROR, $message->level);
// Check that the second row was migrated successfully.
$map_row = $id_map_plugin->getRowBySource(['url' => $valid_url]);
$this->assertEquals(MigrateIdMapInterface::STATUS_IMPORTED, $map_row['source_row_status']);
$this->assertEquals(1, $map_row['destid1']);
}
}

View file

@@ -0,0 +1,261 @@
<?php
namespace Drupal\Tests\migrate\Kernel;
use Drupal\KernelTests\KernelTestBase;
use Drupal\migrate\MigrateException;
use Drupal\migrate\Plugin\MigrateIdMapInterface;
use Drupal\migrate\Plugin\MigrationInterface;
use Drupal\Core\Database\Driver\sqlite\Connection;
/**
* Tests query batching.
*
* @covers \Drupal\migrate_query_batch_test\Plugin\migrate\source\QueryBatchTest
* @group migrate
*/
class QueryBatchTest extends KernelTestBase {
/**
* The mocked migration.
*
* @var MigrationInterface|\Prophecy\Prophecy\ObjectProphecy
*/
protected $migration;
/**
* {@inheritdoc}
*/
public static $modules = [
'migrate',
'migrate_query_batch_test',
];
/**
* {@inheritdoc}
*/
protected function setUp() {
parent::setUp();
// Create a mock migration. This will be injected into the source plugin
// under test.
$this->migration = $this->prophesize(MigrationInterface::class);
$this->migration->id()->willReturn(
$this->randomMachineName(16)
);
// Prophesize a useless ID map plugin and an empty set of destination IDs.
// Calling code can override these prophecies later and set up different
// behaviors.
$this->migration->getIdMap()->willReturn(
$this->prophesize(MigrateIdMapInterface::class)->reveal()
);
$this->migration->getDestinationIds()->willReturn([]);
}
/**
* Tests that a negative batch size throws an exception.
*/
public function testBatchSizeNegative() {
$this->setExpectedException(MigrateException::class, 'batch_size must be greater than or equal to zero');
$plugin = $this->getPlugin(['batch_size' => -1]);
$plugin->next();
}
/**
* Tests that a non-integer batch size throws an exception.
*/
public function testBatchSizeNonInteger() {
$this->setExpectedException(MigrateException::class, 'batch_size must be greater than or equal to zero');
$plugin = $this->getPlugin(['batch_size' => '1']);
$plugin->next();
}
/**
* Provides source data, configuration, and expected results for testQueryBatch().
*/
public function queryDataProvider() {
// Define the parameters for building the data array. The first element is
// the number of source data rows, the second is the batch size to set on
// the plugin configuration.
$test_parameters = [
// Test when batch size is 0.
[200, 0],
// Test when rows mod batch size is 0.
[200, 20],
// Test when rows mod batch size is > 0.
[200, 30],
// Test when batch size = row count.
[200, 200],
// Test when batch size > row count.
[200, 300],
];
// Build the data provider array. The provider array consists of the source
// data rows, the expected result data, the expected count, the plugin
// configuration, the expected batch size and the expected batch count.
$table = 'query_batch_test';
$tests = [];
$data_set = 0;
foreach ($test_parameters as $data) {
list($num_rows, $batch_size) = $data;
for ($i = 0; $i < $num_rows; $i++) {
$tests[$data_set]['source_data'][$table][] = [
'id' => $i,
'data' => $this->randomString(),
];
}
$tests[$data_set]['expected_data'] = $tests[$data_set]['source_data'][$table];
$tests[$data_set][2] = $num_rows;
// Plugin configuration array.
$tests[$data_set][3] = ['batch_size' => $batch_size];
// Expected batch size.
$tests[$data_set][4] = $batch_size;
// Expected batch count is 0 unless a batch size is set.
$expected_batch_count = 0;
if ($batch_size > 0) {
$expected_batch_count = (int) ($num_rows / $batch_size);
if ($num_rows % $batch_size) {
// If there is a remainder an extra batch is needed to get the
// remaining rows.
$expected_batch_count++;
}
}
$tests[$data_set][5] = $expected_batch_count;
$data_set++;
}
return $tests;
}
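
The expected batch count computed in the provider is a ceiling division: a non-zero batch size needs enough batches to cover every row, plus one more when there is a remainder. For example, 200 rows with a batch size of 30 require 7 batches (six full batches of 30 plus a final batch of 20); the equivalent one-liner:

<?php

// Equivalent ceiling-division form of the provider's batch-count logic.
$num_rows = 200;
$batch_size = 30;
$expected_batch_count = $batch_size > 0 ? (int) ceil($num_rows / $batch_size) : 0;
// $expected_batch_count === 7.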
/**
* Tests query batch size.
*
* @param array $source_data
* The source data, keyed by table name. Each table is an array containing
* the rows in that table.
* @param array $expected_data
* The result rows the plugin is expected to return.
* @param int $num_rows
* How many rows the source plugin is expected to return.
* @param array $configuration
* Configuration for the source plugin specifying the batch size.
* @param int $expected_batch_size
* The expected batch size; set to zero for invalid batch sizes.
* @param int $expected_batch_count
* The total number of batches.
*
* @dataProvider queryDataProvider
*/
public function testQueryBatch($source_data, $expected_data, $num_rows, $configuration, $expected_batch_size, $expected_batch_count) {
$plugin = $this->getPlugin($configuration);
// Since we don't yet inject the database connection, we need to use a
// reflection hack to set it in the plugin instance.
$reflector = new \ReflectionObject($plugin);
$property = $reflector->getProperty('database');
$property->setAccessible(TRUE);
$connection = $this->getDatabase($source_data);
$property->setValue($plugin, $connection);
// Test the results.
$i = 0;
/** @var \Drupal\migrate\Row $row */
foreach ($plugin as $row) {
$expected = $expected_data[$i++];
$actual = $row->getSource();
foreach ($expected as $key => $value) {
$this->assertArrayHasKey($key, $actual);
$this->assertSame((string) $value, (string) $actual[$key]);
}
}
// Test that all rows were retrieved.
self::assertSame($num_rows, $i);
// Test the batch size.
if (is_null($expected_batch_size)) {
$expected_batch_size = $configuration['batch_size'];
}
$property = $reflector->getProperty('batchSize');
$property->setAccessible(TRUE);
self::assertSame($expected_batch_size, $property->getValue($plugin));
// Test the batch count.
if (is_null($expected_batch_count)) {
$expected_batch_count = intdiv($num_rows, $expected_batch_size);
if ($num_rows % $configuration['batch_size']) {
$expected_batch_count++;
}
}
$property = $reflector->getProperty('batch');
$property->setAccessible(TRUE);
self::assertSame($expected_batch_count, $property->getValue($plugin));
}
/**
* Instantiates the source plugin under test.
*
* @param array $configuration
* The source plugin configuration.
*
* @return \Drupal\migrate\Plugin\MigrateSourceInterface|object
* The fully configured source plugin.
*/
protected function getPlugin($configuration) {
/** @var \Drupal\migrate\Plugin\MigratePluginManager $plugin_manager */
$plugin_manager = $this->container->get('plugin.manager.migrate.source');
$plugin = $plugin_manager->createInstance('query_batch_test', $configuration, $this->migration->reveal());
$this->migration
->getSourcePlugin()
->willReturn($plugin);
return $plugin;
}
/**
* Builds an in-memory SQLite database from a set of source data.
*
* @param array $source_data
* The source data, keyed by table name. Each table is an array containing
* the rows in that table.
*
* @return \Drupal\Core\Database\Driver\sqlite\Connection
* The SQLite database connection.
*/
protected function getDatabase(array $source_data) {
// Create an in-memory SQLite database. Plugins can interact with it like
// any other database, and it will cease to exist when the connection is
// closed.
$connection_options = ['database' => ':memory:'];
$pdo = Connection::open($connection_options);
$connection = new Connection($pdo, $connection_options);
// Create the tables and fill them with data.
foreach ($source_data as $table => $rows) {
// Use the biggest row to build the table schema.
$counts = array_map('count', $rows);
asort($counts);
end($counts);
$pilot = $rows[key($counts)];
$connection->schema()
->createTable($table, [
// SQLite uses loose affinity typing, so it's OK for every field to
// be a text field.
'fields' => array_map(function () {
return ['type' => 'text'];
}, $pilot),
]);
$fields = array_keys($pilot);
$insert = $connection->insert($table)->fields($fields);
array_walk($rows, [$insert, 'values']);
$insert->execute();
}
return $connection;
}
}

View file

@@ -4,6 +4,7 @@ namespace Drupal\Tests\migrate\Kernel\process;
use Drupal\Core\StreamWrapper\StreamWrapperInterface;
use Drupal\KernelTests\Core\File\FileTestBase;
use Drupal\migrate\MigrateException;
use Drupal\migrate\Plugin\migrate\process\FileCopy;
use Drupal\migrate\MigrateExecutableInterface;
use Drupal\migrate\Plugin\MigrateProcessInterface;
@@ -12,6 +13,8 @@ use Drupal\migrate\Row;
/**
* Tests the file_copy process plugin.
*
* @coversDefaultClass \Drupal\migrate\Plugin\migrate\process\FileCopy
*
* @group migrate
*/
class FileCopyTest extends FileTestBase {
@@ -120,6 +123,32 @@ class FileCopyTest extends FileTestBase {
$this->doTransform($source, 'public://wontmatter.jpg');
}
/**
* Tests that a non-writable destination throws an exception.
*
* @covers ::transform
*/
public function testNonWritableDestination() {
$source = $this->createUri('file.txt', NULL, 'temporary');
// Create the parent location.
$this->createDirectory('public://dir');
// Copy the file under public://dir/subdir1/.
$this->doTransform($source, 'public://dir/subdir1/file.txt');
// Check that 'subdir1' was created and the file was successfully migrated.
$this->assertFileExists('public://dir/subdir1/file.txt');
// Remove all permissions from public://dir to trigger a failure when
// trying to create a subdirectory 'subdir2' inside public://dir.
$this->fileSystem->chmod('public://dir', 0);
// Check that the proper exception is raised.
$this->setExpectedException(MigrateException::class, "Could not create or write to directory 'public://dir/subdir2'");
$this->doTransform($source, 'public://dir/subdir2/file.txt');
}
/**
* Test the 'rename' overwrite mode.
*/