Skip to content

Commit

Permalink
Publish
Browse files Browse the repository at this point in the history
  • Loading branch information
thedrlambda committed Nov 26, 2024
1 parent 9cdf50e commit 3cdfa0c
Show file tree
Hide file tree
Showing 21 changed files with 414 additions and 231 deletions.
7 changes: 7 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,13 @@
## Fixes and improvements
-

# 4.2.0
## Added features
- Re-added the `build` command
## Fixes and improvements
- Fix bug where repos would be empty after `fetch` or `org checkout`
- Prevent checking out an organization into an existing folder

# 4.1.0
## Added features
- `rename` an organization
Expand Down
2 changes: 1 addition & 1 deletion contexts.js
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,9 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
Object.defineProperty(exports, "__esModule", { value: true });
exports.CONTEXTS = void 0;
const fs_1 = __importDefault(require("fs"));
const path_1 = __importDefault(require("path"));
const prompt_1 = require("./prompt");
const utils_1 = require("./utils");
const path_1 = __importDefault(require("path"));
function downOrg(cmd) {
const folders = fs_1.default.readdirSync(".");
let org = undefined;
Expand Down
4 changes: 2 additions & 2 deletions contexts.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import fs from "fs";
import { GREEN, NORMAL_COLOR, YELLOW } from "./prompt";
import { Path, directoryNames, fetchOrg, fetchOrgRaw } from "./utils";
import path from "path";
import { GREEN, NORMAL_COLOR } from "./prompt";
import { Path, directoryNames, fetchOrgRaw } from "./utils";

function downOrg(cmd: string) {
const folders = fs.readdirSync(".");
Expand Down
Binary file modified dist/windows.zip
Binary file not shown.
36 changes: 36 additions & 0 deletions newCommands/build.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
"use strict";
// Standard helper emitted by the TypeScript compiler: drives a generator-based
// coroutine to completion as a Promise chain, so `async`/`await` works when
// compiled to pre-ES2017 targets. Do not edit by hand — regenerated by tsc.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Wrap a yielded value in the target Promise implementation if it is not one already.
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        // Resume the generator with the resolved value of the previous await.
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        // Throw the rejection reason back into the generator so try/catch in the source works.
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        // Either settle the outer promise (generator done) or await the next yielded value.
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.build = exports.do_build = void 0;
const detect_project_type_1 = require("@merrymake/detect-project-type");
const utils_1 = require("../utils");
// Compiled from newCommands/build.ts. Detects the project type of the current
// directory and runs its build commands one at a time (sequential on purpose:
// later steps may depend on earlier output).
function do_build() {
    return __awaiter(this, void 0, void 0, function* () {
        try {
            // e.g. the project kind inferred from marker files in "." — see @merrymake/detect-project-type.
            const projectType = (0, detect_project_type_1.detectProjectType)(".");
            (0, utils_1.output2)(`Building ${projectType} project...`);
            // Ordered list of shell commands for this project type.
            const buildCommands = detect_project_type_1.BUILD_SCRIPT_MAKERS[projectType](".");
            for (let i = 0; i < buildCommands.length; i++) {
                const x = buildCommands[i];
                yield (0, utils_1.spawnPromise)(x);
            }
        }
        catch (e) {
            // NOTE(review): catch-and-rethrow is a no-op; mirrors the TS source as compiled.
            throw e;
        }
    });
}
exports.do_build = do_build;
// CLI entry for the `build` command: queue the build step for later execution
// and let the shared finish() routine complete argument handling.
function build() {
    (0, utils_1.addToExecuteQueue)(() => do_build());
    return (0, utils_1.finish)();
}
exports.build = build;
24 changes: 24 additions & 0 deletions newCommands/build.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
import {
BUILD_SCRIPT_MAKERS,
detectProjectType,
} from "@merrymake/detect-project-type";
import { addToExecuteQueue, finish, output2, spawnPromise } from "../utils";

/**
 * Detect the project type of the current working directory and run its
 * build commands.
 *
 * Commands are awaited one at a time — sequential on purpose, since later
 * build steps may depend on the output of earlier ones.
 *
 * @throws propagates unchanged whatever `detectProjectType`, the
 *         `BUILD_SCRIPT_MAKERS` lookup, or `spawnPromise` throw.
 */
export async function do_build() {
  const projectType = detectProjectType(".");
  output2(`Building ${projectType} project...`);
  // Ordered list of shell commands for this project type.
  const buildCommands = BUILD_SCRIPT_MAKERS[projectType](".");
  for (const cmd of buildCommands) {
    await spawnPromise(cmd);
  }
}

/**
 * CLI entry point for the `build` command: enqueue the actual build work
 * and hand control back to the shared command-finishing flow.
 */
export function build() {
  const runBuild = () => do_build();
  addToExecuteQueue(runBuild);
  return finish();
}
15 changes: 10 additions & 5 deletions newCommands/clone.js
Original file line number Diff line number Diff line change
Expand Up @@ -38,22 +38,21 @@ function do_clone(struct, folderName, displayName, organizationId) {
yield (0, utils_1.execPromise)(`git init --initial-branch=main`, publicDir);
yield (0, utils_1.execPromise)(`git remote add origin "${config_1.GIT_HOST}/o${organizationId}/public"`, publicDir);
fs_1.default.writeFileSync(publicDir + "/index.html", "<html><body>Hello, World!</body></html>");
(0, fetch_1.ensureGroupStructure)({ pathTo: new types_1.PathToOrganization(folderName), id: organizationId }, struct);
yield (0, fetch_1.ensureGroupStructure)({ pathTo: new types_1.PathToOrganization(folderName), id: organizationId }, struct);
}
catch (e) {
throw e;
}
});
}
exports.do_clone = do_clone;
function do_fetch_clone(displayName, organizationId) {
function do_fetch_clone(displayName, folderName, organizationId) {
return __awaiter(this, void 0, void 0, function* () {
try {
const reply = yield (0, utils_1.sshReq)(`organization-fetch`, organizationId.toString());
if (!reply.startsWith("{"))
throw reply;
const structure = JSON.parse(reply);
const folderName = (0, utils_1.toFolderName)(displayName);
yield do_clone(structure, folderName, displayName, organizationId);
}
catch (e) {
Expand All @@ -63,8 +62,14 @@ function do_fetch_clone(displayName, organizationId) {
}
exports.do_fetch_clone = do_fetch_clone;
function checkout_org(displayName, organizationId) {
(0, utils_1.addToExecuteQueue)(() => do_fetch_clone(displayName, organizationId));
return (0, utils_1.finish)();
return __awaiter(this, void 0, void 0, function* () {
const folderName = (0, utils_1.toFolderName)(displayName);
if (fs_1.default.existsSync(folderName)) {
throw `Folder '${folderName}' already exists.`;
}
(0, utils_1.addToExecuteQueue)(() => do_fetch_clone(displayName, folderName, organizationId));
return (0, utils_1.finish)();
});
}
exports.checkout_org = checkout_org;
function checkout() {
Expand Down
14 changes: 10 additions & 4 deletions newCommands/clone.ts
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ export async function do_clone(
publicDir + "/index.html",
"<html><body>Hello, World!</body></html>"
);
ensureGroupStructure(
await ensureGroupStructure(
{ pathTo: new PathToOrganization(folderName), id: organizationId },
struct
);
Expand All @@ -60,24 +60,30 @@ export async function do_clone(

export async function do_fetch_clone(
displayName: string,
folderName: string,
organizationId: OrganizationId
) {
try {
const reply = await sshReq(`organization-fetch`, organizationId.toString());
if (!reply.startsWith("{")) throw reply;
const structure = JSON.parse(reply);
const folderName = toFolderName(displayName);
await do_clone(structure, folderName, displayName, organizationId);
} catch (e) {
throw e;
}
}

export function checkout_org(
export async function checkout_org(
displayName: string,
organizationId: OrganizationId
) {
addToExecuteQueue(() => do_fetch_clone(displayName, organizationId));
const folderName = toFolderName(displayName);
if (fs.existsSync(folderName)) {
throw `Folder '${folderName}' already exists.`;
}
addToExecuteQueue(() =>
do_fetch_clone(displayName, folderName, organizationId)
);
return finish();
}

Expand Down
55 changes: 30 additions & 25 deletions newCommands/fetch.js
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.fetch = exports.ensureGroupStructure = void 0;
exports.fetch = exports.do_fetch = exports.ensureGroupStructure = void 0;
const fs_1 = __importDefault(require("fs"));
const config_1 = require("../config");
const types_1 = require("../types");
Expand Down Expand Up @@ -48,31 +48,32 @@ function getCurrentStructure(pathToOrganization) {
});
}
function ensureRepositoryStructure(organizationId, serviceGroup, toBe, asIs) {
Object.keys(toBe).forEach((repositoryId) => {
const repositoryDisplayName = toBe[repositoryId];
const folderName = (0, utils_1.toFolderName)(repositoryDisplayName);
const pathToRepository = serviceGroup.pathTo.with(folderName);
if (asIs[repositoryId] === undefined) {
createServiceFolder(organizationId, serviceGroup.id, {
return __awaiter(this, void 0, void 0, function* () {
yield Promise.all(Object.keys(toBe).map((repositoryId) => __awaiter(this, void 0, void 0, function* () {
const repositoryDisplayName = toBe[repositoryId];
const folderName = (0, utils_1.toFolderName)(repositoryDisplayName);
const pathToRepository = serviceGroup.pathTo.with(folderName);
if (asIs[repositoryId] !== undefined &&
asIs[repositoryId] !== folderName) {
fs_1.default.renameSync(serviceGroup.pathTo.with(asIs[repositoryId]).toString(), pathToRepository.toString());
}
yield ensureServiceFolder(organizationId, serviceGroup.id, {
pathTo: pathToRepository,
id: new types_1.RepositoryId(repositoryId),
});
}
else if (asIs[repositoryId] !== folderName) {
fs_1.default.renameSync(serviceGroup.pathTo.with(asIs[repositoryId]).toString(), pathToRepository.toString());
}
delete asIs[repositoryId];
});
Object.keys(asIs).forEach((repositoryId) => {
const folderName = asIs[repositoryId];
// TODO Delete
console.log("Delete", serviceGroup.pathTo.with(folderName).toString());
}).then();
delete asIs[repositoryId];
})));
yield Promise.all(Object.keys(asIs).map((repositoryId) => {
const folderName = asIs[repositoryId];
// TODO Delete
console.log("Delete", serviceGroup.pathTo.with(folderName).toString());
}));
});
}
function ensureGroupStructure(organization, toBe) {
return __awaiter(this, void 0, void 0, function* () {
const asIs = yield getCurrentStructure(organization.pathTo);
Object.keys(toBe).forEach((serviceGroupId) => {
yield Promise.all(Object.keys(toBe).map((serviceGroupId) => __awaiter(this, void 0, void 0, function* () {
const group = toBe[serviceGroupId];
const folderName = (0, utils_1.toFolderName)(group.displayName);
const pathToGroup = organization.pathTo.with(folderName);
Expand All @@ -88,20 +89,21 @@ function ensureGroupStructure(organization, toBe) {
}
asIsRepos = asIs[serviceGroupId].repositories;
}
ensureRepositoryStructure(organization.id, { pathTo: pathToGroup, id: new types_1.ServiceGroupId(serviceGroupId) }, group.repositories, asIsRepos);
yield ensureRepositoryStructure(organization.id, { pathTo: pathToGroup, id: new types_1.ServiceGroupId(serviceGroupId) }, group.repositories, asIsRepos);
delete asIs[serviceGroupId];
});
Object.keys(asIs).forEach((groupId) => {
})));
yield Promise.all(Object.keys(asIs).map((groupId) => {
const group = asIs[groupId];
const folderName = group.name;
// TODO Delete
console.log("Delete", organization.pathTo.with(folderName).toString());
});
}));
});
}
exports.ensureGroupStructure = ensureGroupStructure;
function createServiceFolder(organizationId, groupId, repository) {
function ensureServiceFolder(organizationId, groupId, repository) {
return __awaiter(this, void 0, void 0, function* () {
process.stdout.write(".");
const dir = repository.pathTo.toString();
const repo = `"${config_1.GIT_HOST}/o${organizationId}/g${groupId}/r${repository.id}"`;
try {
Expand Down Expand Up @@ -139,14 +141,17 @@ function do_fetch(organization) {
if (!reply.startsWith("{"))
throw reply;
const structure = JSON.parse(reply);
(0, utils_1.output2)(`Consolidating...`);
process.stdout.write(`Consolidating`);
yield ensureGroupStructure(organization, structure);
process.stdout.write("\n");
return structure;
}
catch (e) {
throw e;
}
});
}
exports.do_fetch = do_fetch;
function fetch(organization) {
(0, utils_1.addToExecuteQueue)(() => do_fetch(organization));
return (0, utils_1.finish)();
Expand Down
Loading

0 comments on commit 3cdfa0c

Please sign in to comment.