Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
21 changes: 0 additions & 21 deletions package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 0 additions & 2 deletions packages/testcontainers/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,6 @@
"dependencies": {
"@balena/dockerignore": "^1.0.2",
"@types/dockerode": "^4.0.1",
"archiver": "^7.0.1",
"async-lock": "^1.4.1",
"byline": "^5.0.0",
"debug": "^4.4.3",
Expand All @@ -47,7 +46,6 @@
"undici": "^7.22.0"
},
"devDependencies": {
"@types/archiver": "^7.0.0",
"@types/async-lock": "^1.4.2",
"@types/byline": "^4.2.36",
"@types/debug": "^4.1.12",
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,154 @@
import { createWriteStream, promises as fs } from "fs";
import path from "path";
import { Readable } from "stream";
import { pipeline } from "stream/promises";
import tar from "tar-fs";
import tmp from "tmp";
import { Content, ContentToCopy, DirectoryToCopy, FileToCopy } from "../types";

// The three kinds of sources that can be staged into a single tar archive
// bound for a container: host files, host directories, and in-memory or
// streamed contents. Every collection is optional; the consumer defaults
// each absent field to an empty list.
type ArchiveToCopyToContainer = {
filesToCopy?: FileToCopy[];
directoriesToCopy?: DirectoryToCopy[];
contentsToCopy?: ContentToCopy[];
};

/**
 * Stages the given files, directories and raw contents into a temporary
 * directory and returns a tar stream of that directory, suitable for the
 * container runtime's put-archive API.
 *
 * Lifecycle of the staging directory: if staging fails, it is removed
 * immediately and the error is rethrown; otherwise it is removed exactly
 * once, as soon as the returned tar stream ends, closes or errors.
 */
export async function createArchiveToCopyToContainer({
  filesToCopy = [],
  directoriesToCopy = [],
  contentsToCopy = [],
}: ArchiveToCopyToContainer): Promise<Readable> {
  const staging = tmp.dirSync({ unsafeCleanup: true });

  try {
    for (const file of filesToCopy) {
      await copyFileToStagingDirectory(staging.name, file.source, file.target, file.mode);
    }
    for (const directory of directoriesToCopy) {
      await copyDirectoryToStagingDirectory(staging.name, directory.source, directory.target, directory.mode);
    }
    for (const entry of contentsToCopy) {
      await copyContentToStagingDirectory(staging.name, entry.content, entry.target, entry.mode);
    }
  } catch (error) {
    staging.removeCallback();
    throw error;
  }

  // umask: 0 keeps the staged modes intact; the cast is needed because the
  // option is absent from the bundled tar-fs typings.
  const archive = tar.pack(staging.name, { dereference: true, umask: 0 } as tar.PackOptions);

  // Remove the staging directory exactly once, whichever terminal event
  // fires first on the stream.
  let disposed = false;
  const disposeStaging = () => {
    if (!disposed) {
      disposed = true;
      staging.removeCallback();
    }
  };
  for (const event of ["end", "close", "error"]) {
    archive.once(event, disposeStaging);
  }

  return archive;
}

/**
 * Copies one host file into the staging directory at the archive target
 * path, creating any missing parent directories. Symlinks are followed
 * (dereference) so the archive contains real file contents. When a mode
 * is given, it is normalized and applied to the staged copy.
 */
async function copyFileToStagingDirectory(
  stagingDirectory: string,
  source: string,
  target: string,
  mode: number | undefined
): Promise<void> {
  const destination = getArchiveTargetPath(stagingDirectory, target);
  await fs.mkdir(path.dirname(destination), { recursive: true });
  await fs.cp(source, destination, { dereference: true });

  if (mode === undefined) {
    return;
  }
  await fs.chmod(destination, normalizeArchiveMode(mode));
}

/**
 * Copies the children of a host directory into the staging directory at
 * the archive target path. Entries are copied individually (rather than
 * copying the directory itself) so they merge into any already-staged
 * content at the same target. When a mode is given, it is normalized and
 * applied recursively to everything under the target.
 */
async function copyDirectoryToStagingDirectory(
  stagingDirectory: string,
  source: string,
  target: string,
  mode: number | undefined
): Promise<void> {
  const destination = getArchiveTargetPath(stagingDirectory, target);
  await fs.mkdir(destination, { recursive: true });

  const names = await fs.readdir(source);
  const copies = names.map((name) =>
    fs.cp(path.resolve(source, name), path.resolve(destination, name), { recursive: true, dereference: true })
  );
  await Promise.all(copies);

  if (mode !== undefined) {
    await setModeRecursively(destination, normalizeArchiveMode(mode));
  }
}

/**
 * Materializes in-memory or streamed content as a file in the staging
 * directory at the archive target path, creating any missing parent
 * directories. When a mode is given, it is normalized and applied to the
 * written file.
 */
async function copyContentToStagingDirectory(
  stagingDirectory: string,
  content: Content,
  target: string,
  mode: number | undefined
): Promise<void> {
  const destination = getArchiveTargetPath(stagingDirectory, target);
  await fs.mkdir(path.dirname(destination), { recursive: true });
  await writeContentToFile(content, destination);

  if (mode === undefined) {
    return;
  }
  await fs.chmod(destination, normalizeArchiveMode(mode));
}

/**
 * Writes content to the given path: streams are piped to a write stream
 * (pipeline handles cleanup on failure), while strings/buffers are
 * written in one shot.
 */
async function writeContentToFile(content: Content, targetPath: string): Promise<void> {
  if (!(content instanceof Readable)) {
    await fs.writeFile(targetPath, content);
    return;
  }
  await pipeline(content, createWriteStream(targetPath));
}

/**
 * Applies the given mode to a directory and everything beneath it:
 * subdirectories recurse, plain entries get a single chmod. Children at
 * each level are processed concurrently.
 */
async function setModeRecursively(targetPath: string, mode: number): Promise<void> {
  await fs.chmod(targetPath, mode);

  const children = await fs.readdir(targetPath, { withFileTypes: true });
  const tasks = children.map((child) => {
    const childPath = path.resolve(targetPath, child.name);
    return child.isDirectory() ? setModeRecursively(childPath, mode) : fs.chmod(childPath, mode);
  });
  await Promise.all(tasks);
}

/**
 * Maps a container-side target path onto a path inside the staging
 * directory. The target is first normalized as an absolute POSIX path
 * (backslashes converted, "." and ".." collapsed), which guarantees the
 * result cannot escape the staging directory. A root target ("/") maps
 * to the staging directory itself.
 */
function getArchiveTargetPath(stagingDirectory: string, target: string): string {
  const absolutePosix = path.posix.resolve("/", target.replace(/\\/gu, "/"));
  const segments = absolutePosix.split("/").filter((segment) => segment.length > 0);

  return segments.length === 0 ? stagingDirectory : path.resolve(stagingDirectory, ...segments);
}

/**
 * Interprets a user-supplied mode, tolerating decimal literals that were
 * presumably meant as octal (e.g. a caller writing 755 instead of 0o755).
 *
 * - Values within 0o777 are taken at face value.
 * - Larger values that still fit the 12-bit permission range and whose
 *   decimal digits are all 0–7 are reinterpreted as octal
 *   (755 -> 0o755, 1777 -> 0o1777).
 * - Anything else is returned unchanged.
 *
 * NOTE(review): this heuristic is ambiguous for genuine octal literals
 * above 0o777 — e.g. 0o1777 (decimal 1023) is reinterpreted as
 * parseInt("1023", 8) — behavior preserved here as-is.
 */
function normalizeArchiveMode(mode: number): number {
  if (mode <= 0o777) {
    return mode;
  }

  const digits = String(mode);
  const looksLikeDecimalSpelledOctal = mode <= 0o7777 && /^[0-7]+$/u.test(digits);

  return looksLikeDecimalSpelledOctal ? parseInt(digits, 8) : mode;
}
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
import archiver from "archiver";
import { promises as fs } from "fs";
import getPort from "get-port";
import path from "path";
import tar from "tar-fs";
import tmp from "tmp";
import { RandomUuid } from "../common";
import { getContainerRuntimeClient } from "../container-runtime";
import { PullPolicy } from "../utils/pull-policy";
Expand All @@ -15,6 +17,41 @@ import {
import { Wait } from "../wait-strategies/wait";
import { GenericContainer } from "./generic-container";

/**
 * Builds a single-entry tar stream containing "hello world" at the given
 * target path, with the entry owned by uid/gid. The backing temporary
 * directory is removed exactly once when the stream ends, closes or
 * errors; if staging fails, it is removed immediately and the error is
 * rethrown.
 */
async function createArchiveWithOwnership(target: string, uid: number, gid: number) {
  const staging = tmp.dirSync({ unsafeCleanup: true });

  try {
    const fileName = "archive.txt";
    await fs.writeFile(path.resolve(staging.name, fileName), "hello world");

    // tar entry names are relative, so strip any leading slash.
    const entryName = target.startsWith("/") ? target.slice(1) : target;

    const archive = tar.pack(staging.name, {
      entries: [fileName],
      umask: 0,
      map: (header) => ({ ...header, name: entryName, uid, gid }),
    } as tar.PackOptions);

    let disposed = false;
    const disposeStaging = () => {
      if (!disposed) {
        disposed = true;
        staging.removeCallback();
      }
    };
    for (const event of ["end", "close", "error"]) {
      archive.once(event, disposeStaging);
    }

    return archive;
  } catch (error) {
    staging.removeCallback();
    throw error;
  }
}

describe("GenericContainer", { timeout: 180_000 }, () => {
const fixtures = path.resolve(__dirname, "..", "..", "fixtures", "docker");

Expand Down Expand Up @@ -521,9 +558,7 @@ describe("GenericContainer", { timeout: 180_000 }, () => {
.withExposedPorts(8080)
.start();

const tar = archiver("tar");
tar.append("hello world", { name: targetWithCopyOwnership.slice(1), uid, gid } as archiver.EntryData);
tar.finalize();
const tar = await createArchiveWithOwnership(targetWithCopyOwnership, uid, gid);

await container.copyArchiveToContainer(tar, "/", { copyUIDGID: true });

Expand All @@ -536,9 +571,7 @@ describe("GenericContainer", { timeout: 180_000 }, () => {
const uid = 4242;
const gid = 4343;
const targetWithCopyOwnership = "/tmp/with-copy-archives-copyuidgid.txt";
const tar = archiver("tar");
tar.append("hello world", { name: targetWithCopyOwnership.slice(1), uid, gid } as archiver.EntryData);
tar.finalize();
const tar = await createArchiveWithOwnership(targetWithCopyOwnership, uid, gid);

await using containerWithCopyOwnership = await new GenericContainer("cristianrgreco/testcontainer:1.1.14")
.withCopyArchivesToContainer([
Expand Down
32 changes: 6 additions & 26 deletions packages/testcontainers/src/generic-container/generic-container.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,5 @@
import archiver from "archiver";
import AsyncLock from "async-lock";
import { Container, ContainerCreateOptions, HostConfig } from "dockerode";
import { promises as fs } from "fs";
import { Readable } from "stream";
import { containerLog, hash, log, toNanos } from "../common";
import { ContainerRuntimeClient, getContainerRuntimeClient, ImageName } from "../container-runtime";
Expand Down Expand Up @@ -34,6 +32,7 @@ import { ImagePullPolicy, PullPolicy } from "../utils/pull-policy";
import { Wait } from "../wait-strategies/wait";
import { waitForContainer } from "../wait-strategies/wait-for-container";
import { WaitStrategy } from "../wait-strategies/wait-strategy";
import { createArchiveToCopyToContainer } from "./create-archive-to-copy-to-container";
import { GenericContainerBuilder } from "./generic-container-builder";
import { inspectContainerUntilPortsExposed } from "./inspect-container-util-ports-exposed";
import { StartedGenericContainer } from "./started-generic-container";
Expand Down Expand Up @@ -182,8 +181,11 @@ export class GenericContainer implements TestContainer {
}

if (this.filesToCopy.length > 0 || this.directoriesToCopy.length > 0 || this.contentsToCopy.length > 0) {
const archive = await this.createArchiveToCopyToContainer();
archive.finalize();
const archive = await createArchiveToCopyToContainer({
filesToCopy: this.filesToCopy,
directoriesToCopy: this.directoriesToCopy,
contentsToCopy: this.contentsToCopy,
});
await client.container.putArchive(container, archive, "/", this.copyToContainerOptions);
}

Expand Down Expand Up @@ -258,28 +260,6 @@ export class GenericContainer implements TestContainer {
}
}

private async createArchiveToCopyToContainer(): Promise<archiver.Archiver> {
const tar = archiver("tar");
const filesToCopyWithStats = await Promise.all(
this.filesToCopy.map(async (fileToCopy) => ({
...fileToCopy,
stats: await fs.stat(fileToCopy.source),
}))
);

for (const { source, target, mode, stats } of filesToCopyWithStats) {
tar.file(source, { name: target, mode, stats });
}
for (const { source, target, mode } of this.directoriesToCopy) {
tar.directory(source, target, { mode });
}
for (const { content, target, mode } of this.contentsToCopy) {
tar.append(content, { name: target, mode });
}

return tar;
}

protected containerStarted?(
container: StartedTestContainer,
inspectResult: InspectResult,
Expand Down
Loading
Loading