diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 36d65bf..42236cf 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -7,9 +7,6 @@ on:
       - 'index.js'
       - 'package.json'
       - 'package-lock.json'
-      - 'pacman-tool-repo/setup.sh'
-      - 'pacman-tool-repo/repo-add'
-      - 'pacman-tool-repo/util/**'
       - 'node_modules/**'
       - '.github/workflows/test.yml'
@@ -18,15 +15,8 @@ jobs:
     runs-on: ubuntu-latest
     name: Check commands
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
       - uses: ./
       - run: |
-          ./test.sh
-  #debug2:
-  #  runs-on: ubuntu-latest
-  #  name: Check commands
-  #  steps:
-  #  - uses: actions/checkout@v3
-  #  - uses: termux-pacman/pacman-tool-repo@v1
-  #  - run: |
-  #      ./test.sh
+          sudo apt search pacman
+          repo-add --version
diff --git a/README.md b/README.md
index 9699ce6..2b32021 100644
--- a/README.md
+++ b/README.md
@@ -10,8 +10,8 @@ jobs:
     runs-on: ubuntu-latest
     name: Example
     steps:
-      - uses: actions/checkout@v3
-      - uses: termux-pacman/pacman-tool-repo@v1.1
+      - uses: actions/checkout@v4
+      - uses: termux-pacman/pacman-tool-repo@v2
       - run: |
           repo-add --help
           repo-remove --help
diff --git a/action.yml b/action.yml
index b2ac71e..fea34cb 100644
--- a/action.yml
+++ b/action.yml
@@ -5,5 +5,5 @@ branding:
   icon: 'database'
   color: 'blue'
 runs:
-  using: 'node16'
+  using: 'node20'
   main: 'index.js'
diff --git a/index.js b/index.js
index c6eb70e..093f837 100644
--- a/index.js
+++ b/index.js
@@ -1,24 +1,20 @@
 // Running a script that sets everything up
 const exec = require('@actions/exec');
-const fs = require('fs');
-var https = require('follow-redirects').https;
-const process = require('process');
-const os = require('os');
-const path = require('path');
-tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "ptr"));
-process.chdir(tmpDir);
-const file = fs.createWriteStream("pacman-tool-repo.tar.xz");
-const request = https.get("https://github.com/termux-pacman/pacman-tool-repo/raw/main/pacman-tool-repo.tar.xz", function(response) {
-  response.pipe(file);
-  file.on("finish", async() => {
-    file.close();
-    await exec.exec("tar xJf pacman-tool-repo.tar.xz");
-    fs.unlinkSync("pacman-tool-repo.tar.xz");
-    process.chdir("pacman-tool-repo/");
-    await exec.exec("sudo ./setup.sh");
-    process.chdir(__dirname);
-    fs.rmSync(tmpDir, {recursive:true, force:true});
-  });
-});
+async function start() {
+  await exec.exec("sudo su -c \"echo 'deb http://archive.ubuntu.com/ubuntu/ lunar universe' > /etc/apt/sources.list.d/lunar.list\"");
+  await exec.exec("sudo su -c \"echo 'deb-src http://archive.ubuntu.com/ubuntu/ lunar universe' >> /etc/apt/sources.list.d/lunar.list\"");
+
+  while (true) {
+    try {
+      await exec.exec("sudo apt update -y");
+      await exec.exec("sudo apt install pacman-package-manager -y");
+      break;
+    } catch (error) {
+      continue;
+    }
+  }
+}
+
+start();
diff --git a/node_modules/.package-lock.json b/node_modules/.package-lock.json
index 9a36d62..14fb78d 100644
--- a/node_modules/.package-lock.json
+++ b/node_modules/.package-lock.json
@@ -16,25 +16,6 @@
       "version": "1.1.3",
       "resolved": "https://registry.npmjs.org/@actions/io/-/io-1.1.3.tgz",
       "integrity": "sha512-wi9JjgKLYS7U/z8PPbco+PvTb/nRWjeoFlJ1Qer83k/3C5PHQi28hiVdeE2kHXmIL99mQFawx8qt/JPjZilJ8Q=="
-    },
-    "node_modules/follow-redirects": {
-      "version": "1.15.2",
-      "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.2.tgz",
-      "integrity": "sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA==",
-      "funding": [
-        {
-          "type": "individual",
-          "url":
"https://github.com/sponsors/RubenVerborgh" - } - ], - "engines": { - "node": ">=4.0" - }, - "peerDependenciesMeta": { - "debug": { - "optional": true - } - } } } } diff --git a/node_modules/follow-redirects/LICENSE b/node_modules/follow-redirects/LICENSE deleted file mode 100644 index 742cbad..0000000 --- a/node_modules/follow-redirects/LICENSE +++ /dev/null @@ -1,18 +0,0 @@ -Copyright 2014–present Olivier Lalonde , James Talmage , Ruben Verborgh - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/follow-redirects/README.md b/node_modules/follow-redirects/README.md deleted file mode 100644 index eb869a6..0000000 --- a/node_modules/follow-redirects/README.md +++ /dev/null @@ -1,155 +0,0 @@ -## Follow Redirects - -Drop-in replacement for Node's `http` and `https` modules that automatically follows redirects. - -[![npm version](https://img.shields.io/npm/v/follow-redirects.svg)](https://www.npmjs.com/package/follow-redirects) -[![Build Status](https://github.com/follow-redirects/follow-redirects/workflows/CI/badge.svg)](https://github.com/follow-redirects/follow-redirects/actions) -[![Coverage Status](https://coveralls.io/repos/follow-redirects/follow-redirects/badge.svg?branch=master)](https://coveralls.io/r/follow-redirects/follow-redirects?branch=master) -[![npm downloads](https://img.shields.io/npm/dm/follow-redirects.svg)](https://www.npmjs.com/package/follow-redirects) -[![Sponsor on GitHub](https://img.shields.io/static/v1?label=Sponsor&message=%F0%9F%92%96&logo=GitHub)](https://github.com/sponsors/RubenVerborgh) - -`follow-redirects` provides [request](https://nodejs.org/api/http.html#http_http_request_options_callback) and [get](https://nodejs.org/api/http.html#http_http_get_options_callback) - methods that behave identically to those found on the native [http](https://nodejs.org/api/http.html#http_http_request_options_callback) and [https](https://nodejs.org/api/https.html#https_https_request_options_callback) - modules, with the exception that they will seamlessly follow redirects. - -```javascript -const { http, https } = require('follow-redirects'); - -http.get('http://bit.ly/900913', response => { - response.on('data', chunk => { - console.log(chunk); - }); -}).on('error', err => { - console.error(err); -}); -``` - -You can inspect the final redirected URL through the `responseUrl` property on the `response`. -If no redirection happened, `responseUrl` is the original request URL. 
- -```javascript -const request = https.request({ - host: 'bitly.com', - path: '/UHfDGO', -}, response => { - console.log(response.responseUrl); - // 'http://duckduckgo.com/robots.txt' -}); -request.end(); -``` - -## Options -### Global options -Global options are set directly on the `follow-redirects` module: - -```javascript -const followRedirects = require('follow-redirects'); -followRedirects.maxRedirects = 10; -followRedirects.maxBodyLength = 20 * 1024 * 1024; // 20 MB -``` - -The following global options are supported: - -- `maxRedirects` (default: `21`) – sets the maximum number of allowed redirects; if exceeded, an error will be emitted. - -- `maxBodyLength` (default: 10MB) – sets the maximum size of the request body; if exceeded, an error will be emitted. - -### Per-request options -Per-request options are set by passing an `options` object: - -```javascript -const url = require('url'); -const { http, https } = require('follow-redirects'); - -const options = url.parse('http://bit.ly/900913'); -options.maxRedirects = 10; -options.beforeRedirect = (options, response, request) => { - // Use this to adjust the request options upon redirecting, - // to inspect the latest response headers, - // or to cancel the request by throwing an error - - // response.headers = the redirect response headers - // response.statusCode = the redirect response code (eg. 301, 307, etc.) - - // request.url = the requested URL that resulted in a redirect - // request.headers = the headers in the request that resulted in a redirect - // request.method = the method of the request that resulted in a redirect - if (options.hostname === "example.com") { - options.auth = "user:password"; - } -}; -http.request(options); -``` - -In addition to the [standard HTTP](https://nodejs.org/api/http.html#http_http_request_options_callback) and [HTTPS options](https://nodejs.org/api/https.html#https_https_request_options_callback), -the following per-request options are supported: -- `followRedirects` (default: `true`) – whether redirects should be followed. - -- `maxRedirects` (default: `21`) – sets the maximum number of allowed redirects; if exceeded, an error will be emitted. - -- `maxBodyLength` (default: 10MB) – sets the maximum size of the request body; if exceeded, an error will be emitted. - -- `beforeRedirect` (default: `undefined`) – optionally change the request `options` on redirects, or abort the request by throwing an error. - -- `agents` (default: `undefined`) – sets the `agent` option per protocol, since HTTP and HTTPS use different agents. Example value: `{ http: new http.Agent(), https: new https.Agent() }` - -- `trackRedirects` (default: `false`) – whether to store the redirected response details into the `redirects` array on the response object. - - -### Advanced usage -By default, `follow-redirects` will use the Node.js default implementations -of [`http`](https://nodejs.org/api/http.html) -and [`https`](https://nodejs.org/api/https.html). -To enable features such as caching and/or intermediate request tracking, -you might instead want to wrap `follow-redirects` around custom protocol implementations: - -```javascript -const { http, https } = require('follow-redirects').wrap({ - http: require('your-custom-http'), - https: require('your-custom-https'), -}); -``` - -Such custom protocols only need an implementation of the `request` method. - -## Browser Usage - -Due to the way the browser works, -the `http` and `https` browser equivalents perform redirects by default. 
- -By requiring `follow-redirects` this way: -```javascript -const http = require('follow-redirects/http'); -const https = require('follow-redirects/https'); -``` -you can easily tell webpack and friends to replace -`follow-redirect` by the built-in versions: - -```json -{ - "follow-redirects/http" : "http", - "follow-redirects/https" : "https" -} -``` - -## Contributing - -Pull Requests are always welcome. Please [file an issue](https://github.com/follow-redirects/follow-redirects/issues) - detailing your proposal before you invest your valuable time. Additional features and bug fixes should be accompanied - by tests. You can run the test suite locally with a simple `npm test` command. - -## Debug Logging - -`follow-redirects` uses the excellent [debug](https://www.npmjs.com/package/debug) for logging. To turn on logging - set the environment variable `DEBUG=follow-redirects` for debug output from just this module. When running the test - suite it is sometimes advantageous to set `DEBUG=*` to see output from the express server as well. - -## Authors - -- [Ruben Verborgh](https://ruben.verborgh.org/) -- [Olivier Lalonde](mailto:olalonde@gmail.com) -- [James Talmage](mailto:james@talmage.io) - -## License - -[MIT License](https://github.com/follow-redirects/follow-redirects/blob/master/LICENSE) diff --git a/node_modules/follow-redirects/debug.js b/node_modules/follow-redirects/debug.js deleted file mode 100644 index decb77d..0000000 --- a/node_modules/follow-redirects/debug.js +++ /dev/null @@ -1,15 +0,0 @@ -var debug; - -module.exports = function () { - if (!debug) { - try { - /* eslint global-require: off */ - debug = require("debug")("follow-redirects"); - } - catch (error) { /* */ } - if (typeof debug !== "function") { - debug = function () { /* */ }; - } - } - debug.apply(null, arguments); -}; diff --git a/node_modules/follow-redirects/http.js b/node_modules/follow-redirects/http.js deleted file mode 100644 index 695e356..0000000 --- a/node_modules/follow-redirects/http.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require("./").http; diff --git a/node_modules/follow-redirects/https.js b/node_modules/follow-redirects/https.js deleted file mode 100644 index d21c921..0000000 --- a/node_modules/follow-redirects/https.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require("./").https; diff --git a/node_modules/follow-redirects/index.js b/node_modules/follow-redirects/index.js deleted file mode 100644 index 3e199c1..0000000 --- a/node_modules/follow-redirects/index.js +++ /dev/null @@ -1,621 +0,0 @@ -var url = require("url"); -var URL = url.URL; -var http = require("http"); -var https = require("https"); -var Writable = require("stream").Writable; -var assert = require("assert"); -var debug = require("./debug"); - -// Create handlers that pass events from native requests -var events = ["abort", "aborted", "connect", "error", "socket", "timeout"]; -var eventHandlers = Object.create(null); -events.forEach(function (event) { - eventHandlers[event] = function (arg1, arg2, arg3) { - this._redirectable.emit(event, arg1, arg2, arg3); - }; -}); - -var InvalidUrlError = createErrorType( - "ERR_INVALID_URL", - "Invalid URL", - TypeError -); -// Error types with codes -var RedirectionError = createErrorType( - "ERR_FR_REDIRECTION_FAILURE", - "Redirected request failed" -); -var TooManyRedirectsError = createErrorType( - "ERR_FR_TOO_MANY_REDIRECTS", - "Maximum number of redirects exceeded" -); -var MaxBodyLengthExceededError = createErrorType( - "ERR_FR_MAX_BODY_LENGTH_EXCEEDED", - "Request body 
larger than maxBodyLength limit" -); -var WriteAfterEndError = createErrorType( - "ERR_STREAM_WRITE_AFTER_END", - "write after end" -); - -// An HTTP(S) request that can be redirected -function RedirectableRequest(options, responseCallback) { - // Initialize the request - Writable.call(this); - this._sanitizeOptions(options); - this._options = options; - this._ended = false; - this._ending = false; - this._redirectCount = 0; - this._redirects = []; - this._requestBodyLength = 0; - this._requestBodyBuffers = []; - - // Attach a callback if passed - if (responseCallback) { - this.on("response", responseCallback); - } - - // React to responses of native requests - var self = this; - this._onNativeResponse = function (response) { - self._processResponse(response); - }; - - // Perform the first request - this._performRequest(); -} -RedirectableRequest.prototype = Object.create(Writable.prototype); - -RedirectableRequest.prototype.abort = function () { - abortRequest(this._currentRequest); - this.emit("abort"); -}; - -// Writes buffered data to the current native request -RedirectableRequest.prototype.write = function (data, encoding, callback) { - // Writing is not allowed if end has been called - if (this._ending) { - throw new WriteAfterEndError(); - } - - // Validate input and shift parameters if necessary - if (!isString(data) && !isBuffer(data)) { - throw new TypeError("data should be a string, Buffer or Uint8Array"); - } - if (isFunction(encoding)) { - callback = encoding; - encoding = null; - } - - // Ignore empty buffers, since writing them doesn't invoke the callback - // https://github.com/nodejs/node/issues/22066 - if (data.length === 0) { - if (callback) { - callback(); - } - return; - } - // Only write when we don't exceed the maximum body length - if (this._requestBodyLength + data.length <= this._options.maxBodyLength) { - this._requestBodyLength += data.length; - this._requestBodyBuffers.push({ data: data, encoding: encoding }); - this._currentRequest.write(data, encoding, callback); - } - // Error when we exceed the maximum body length - else { - this.emit("error", new MaxBodyLengthExceededError()); - this.abort(); - } -}; - -// Ends the current native request -RedirectableRequest.prototype.end = function (data, encoding, callback) { - // Shift parameters if necessary - if (isFunction(data)) { - callback = data; - data = encoding = null; - } - else if (isFunction(encoding)) { - callback = encoding; - encoding = null; - } - - // Write data if needed and end - if (!data) { - this._ended = this._ending = true; - this._currentRequest.end(null, null, callback); - } - else { - var self = this; - var currentRequest = this._currentRequest; - this.write(data, encoding, function () { - self._ended = true; - currentRequest.end(null, null, callback); - }); - this._ending = true; - } -}; - -// Sets a header value on the current native request -RedirectableRequest.prototype.setHeader = function (name, value) { - this._options.headers[name] = value; - this._currentRequest.setHeader(name, value); -}; - -// Clears a header value on the current native request -RedirectableRequest.prototype.removeHeader = function (name) { - delete this._options.headers[name]; - this._currentRequest.removeHeader(name); -}; - -// Global timeout for all underlying requests -RedirectableRequest.prototype.setTimeout = function (msecs, callback) { - var self = this; - - // Destroys the socket on timeout - function destroyOnTimeout(socket) { - socket.setTimeout(msecs); - socket.removeListener("timeout", 
socket.destroy); - socket.addListener("timeout", socket.destroy); - } - - // Sets up a timer to trigger a timeout event - function startTimer(socket) { - if (self._timeout) { - clearTimeout(self._timeout); - } - self._timeout = setTimeout(function () { - self.emit("timeout"); - clearTimer(); - }, msecs); - destroyOnTimeout(socket); - } - - // Stops a timeout from triggering - function clearTimer() { - // Clear the timeout - if (self._timeout) { - clearTimeout(self._timeout); - self._timeout = null; - } - - // Clean up all attached listeners - self.removeListener("abort", clearTimer); - self.removeListener("error", clearTimer); - self.removeListener("response", clearTimer); - if (callback) { - self.removeListener("timeout", callback); - } - if (!self.socket) { - self._currentRequest.removeListener("socket", startTimer); - } - } - - // Attach callback if passed - if (callback) { - this.on("timeout", callback); - } - - // Start the timer if or when the socket is opened - if (this.socket) { - startTimer(this.socket); - } - else { - this._currentRequest.once("socket", startTimer); - } - - // Clean up on events - this.on("socket", destroyOnTimeout); - this.on("abort", clearTimer); - this.on("error", clearTimer); - this.on("response", clearTimer); - - return this; -}; - -// Proxy all other public ClientRequest methods -[ - "flushHeaders", "getHeader", - "setNoDelay", "setSocketKeepAlive", -].forEach(function (method) { - RedirectableRequest.prototype[method] = function (a, b) { - return this._currentRequest[method](a, b); - }; -}); - -// Proxy all public ClientRequest properties -["aborted", "connection", "socket"].forEach(function (property) { - Object.defineProperty(RedirectableRequest.prototype, property, { - get: function () { return this._currentRequest[property]; }, - }); -}); - -RedirectableRequest.prototype._sanitizeOptions = function (options) { - // Ensure headers are always present - if (!options.headers) { - options.headers = {}; - } - - // Since http.request treats host as an alias of hostname, - // but the url module interprets host as hostname plus port, - // eliminate the host property to avoid confusion. 
- if (options.host) { - // Use hostname if set, because it has precedence - if (!options.hostname) { - options.hostname = options.host; - } - delete options.host; - } - - // Complete the URL object when necessary - if (!options.pathname && options.path) { - var searchPos = options.path.indexOf("?"); - if (searchPos < 0) { - options.pathname = options.path; - } - else { - options.pathname = options.path.substring(0, searchPos); - options.search = options.path.substring(searchPos); - } - } -}; - - -// Executes the next native request (initial or redirect) -RedirectableRequest.prototype._performRequest = function () { - // Load the native protocol - var protocol = this._options.protocol; - var nativeProtocol = this._options.nativeProtocols[protocol]; - if (!nativeProtocol) { - this.emit("error", new TypeError("Unsupported protocol " + protocol)); - return; - } - - // If specified, use the agent corresponding to the protocol - // (HTTP and HTTPS use different types of agents) - if (this._options.agents) { - var scheme = protocol.slice(0, -1); - this._options.agent = this._options.agents[scheme]; - } - - // Create the native request and set up its event handlers - var request = this._currentRequest = - nativeProtocol.request(this._options, this._onNativeResponse); - request._redirectable = this; - for (var event of events) { - request.on(event, eventHandlers[event]); - } - - // RFC7230§5.3.1: When making a request directly to an origin server, […] - // a client MUST send only the absolute path […] as the request-target. - this._currentUrl = /^\//.test(this._options.path) ? - url.format(this._options) : - // When making a request to a proxy, […] - // a client MUST send the target URI in absolute-form […]. - this._options.path; - - // End a redirected request - // (The first request must be ended explicitly with RedirectableRequest#end) - if (this._isRedirect) { - // Write the request entity and end - var i = 0; - var self = this; - var buffers = this._requestBodyBuffers; - (function writeNext(error) { - // Only write if this request has not been redirected yet - /* istanbul ignore else */ - if (request === self._currentRequest) { - // Report any write errors - /* istanbul ignore if */ - if (error) { - self.emit("error", error); - } - // Write the next buffer if there are still left - else if (i < buffers.length) { - var buffer = buffers[i++]; - /* istanbul ignore else */ - if (!request.finished) { - request.write(buffer.data, buffer.encoding, writeNext); - } - } - // End the request if `end` has been called on us - else if (self._ended) { - request.end(); - } - } - }()); - } -}; - -// Processes a response from the current native request -RedirectableRequest.prototype._processResponse = function (response) { - // Store the redirected response - var statusCode = response.statusCode; - if (this._options.trackRedirects) { - this._redirects.push({ - url: this._currentUrl, - headers: response.headers, - statusCode: statusCode, - }); - } - - // RFC7231§6.4: The 3xx (Redirection) class of status code indicates - // that further action needs to be taken by the user agent in order to - // fulfill the request. If a Location header field is provided, - // the user agent MAY automatically redirect its request to the URI - // referenced by the Location field value, - // even if the specific status code is not understood. 
- - // If the response is not a redirect; return it as-is - var location = response.headers.location; - if (!location || this._options.followRedirects === false || - statusCode < 300 || statusCode >= 400) { - response.responseUrl = this._currentUrl; - response.redirects = this._redirects; - this.emit("response", response); - - // Clean up - this._requestBodyBuffers = []; - return; - } - - // The response is a redirect, so abort the current request - abortRequest(this._currentRequest); - // Discard the remainder of the response to avoid waiting for data - response.destroy(); - - // RFC7231§6.4: A client SHOULD detect and intervene - // in cyclical redirections (i.e., "infinite" redirection loops). - if (++this._redirectCount > this._options.maxRedirects) { - this.emit("error", new TooManyRedirectsError()); - return; - } - - // Store the request headers if applicable - var requestHeaders; - var beforeRedirect = this._options.beforeRedirect; - if (beforeRedirect) { - requestHeaders = Object.assign({ - // The Host header was set by nativeProtocol.request - Host: response.req.getHeader("host"), - }, this._options.headers); - } - - // RFC7231§6.4: Automatic redirection needs to done with - // care for methods not known to be safe, […] - // RFC7231§6.4.2–3: For historical reasons, a user agent MAY change - // the request method from POST to GET for the subsequent request. - var method = this._options.method; - if ((statusCode === 301 || statusCode === 302) && this._options.method === "POST" || - // RFC7231§6.4.4: The 303 (See Other) status code indicates that - // the server is redirecting the user agent to a different resource […] - // A user agent can perform a retrieval request targeting that URI - // (a GET or HEAD request if using HTTP) […] - (statusCode === 303) && !/^(?:GET|HEAD)$/.test(this._options.method)) { - this._options.method = "GET"; - // Drop a possible entity and headers related to it - this._requestBodyBuffers = []; - removeMatchingHeaders(/^content-/i, this._options.headers); - } - - // Drop the Host header, as the redirect might lead to a different host - var currentHostHeader = removeMatchingHeaders(/^host$/i, this._options.headers); - - // If the redirect is relative, carry over the host of the last request - var currentUrlParts = url.parse(this._currentUrl); - var currentHost = currentHostHeader || currentUrlParts.host; - var currentUrl = /^\w+:/.test(location) ? 
this._currentUrl : - url.format(Object.assign(currentUrlParts, { host: currentHost })); - - // Determine the URL of the redirection - var redirectUrl; - try { - redirectUrl = url.resolve(currentUrl, location); - } - catch (cause) { - this.emit("error", new RedirectionError({ cause: cause })); - return; - } - - // Create the redirected request - debug("redirecting to", redirectUrl); - this._isRedirect = true; - var redirectUrlParts = url.parse(redirectUrl); - Object.assign(this._options, redirectUrlParts); - - // Drop confidential headers when redirecting to a less secure protocol - // or to a different domain that is not a superdomain - if (redirectUrlParts.protocol !== currentUrlParts.protocol && - redirectUrlParts.protocol !== "https:" || - redirectUrlParts.host !== currentHost && - !isSubdomain(redirectUrlParts.host, currentHost)) { - removeMatchingHeaders(/^(?:authorization|cookie)$/i, this._options.headers); - } - - // Evaluate the beforeRedirect callback - if (isFunction(beforeRedirect)) { - var responseDetails = { - headers: response.headers, - statusCode: statusCode, - }; - var requestDetails = { - url: currentUrl, - method: method, - headers: requestHeaders, - }; - try { - beforeRedirect(this._options, responseDetails, requestDetails); - } - catch (err) { - this.emit("error", err); - return; - } - this._sanitizeOptions(this._options); - } - - // Perform the redirected request - try { - this._performRequest(); - } - catch (cause) { - this.emit("error", new RedirectionError({ cause: cause })); - } -}; - -// Wraps the key/value object of protocols with redirect functionality -function wrap(protocols) { - // Default settings - var exports = { - maxRedirects: 21, - maxBodyLength: 10 * 1024 * 1024, - }; - - // Wrap each protocol - var nativeProtocols = {}; - Object.keys(protocols).forEach(function (scheme) { - var protocol = scheme + ":"; - var nativeProtocol = nativeProtocols[protocol] = protocols[scheme]; - var wrappedProtocol = exports[scheme] = Object.create(nativeProtocol); - - // Executes a request, following redirects - function request(input, options, callback) { - // Parse parameters - if (isString(input)) { - var parsed; - try { - parsed = urlToOptions(new URL(input)); - } - catch (err) { - /* istanbul ignore next */ - parsed = url.parse(input); - } - if (!isString(parsed.protocol)) { - throw new InvalidUrlError({ input }); - } - input = parsed; - } - else if (URL && (input instanceof URL)) { - input = urlToOptions(input); - } - else { - callback = options; - options = input; - input = { protocol: protocol }; - } - if (isFunction(options)) { - callback = options; - options = null; - } - - // Set defaults - options = Object.assign({ - maxRedirects: exports.maxRedirects, - maxBodyLength: exports.maxBodyLength, - }, input, options); - options.nativeProtocols = nativeProtocols; - if (!isString(options.host) && !isString(options.hostname)) { - options.hostname = "::1"; - } - - assert.equal(options.protocol, protocol, "protocol mismatch"); - debug("options", options); - return new RedirectableRequest(options, callback); - } - - // Executes a GET request, following redirects - function get(input, options, callback) { - var wrappedRequest = wrappedProtocol.request(input, options, callback); - wrappedRequest.end(); - return wrappedRequest; - } - - // Expose the properties on the wrapped protocol - Object.defineProperties(wrappedProtocol, { - request: { value: request, configurable: true, enumerable: true, writable: true }, - get: { value: get, configurable: true, enumerable: true, 
writable: true }, - }); - }); - return exports; -} - -/* istanbul ignore next */ -function noop() { /* empty */ } - -// from https://github.com/nodejs/node/blob/master/lib/internal/url.js -function urlToOptions(urlObject) { - var options = { - protocol: urlObject.protocol, - hostname: urlObject.hostname.startsWith("[") ? - /* istanbul ignore next */ - urlObject.hostname.slice(1, -1) : - urlObject.hostname, - hash: urlObject.hash, - search: urlObject.search, - pathname: urlObject.pathname, - path: urlObject.pathname + urlObject.search, - href: urlObject.href, - }; - if (urlObject.port !== "") { - options.port = Number(urlObject.port); - } - return options; -} - -function removeMatchingHeaders(regex, headers) { - var lastValue; - for (var header in headers) { - if (regex.test(header)) { - lastValue = headers[header]; - delete headers[header]; - } - } - return (lastValue === null || typeof lastValue === "undefined") ? - undefined : String(lastValue).trim(); -} - -function createErrorType(code, message, baseClass) { - // Create constructor - function CustomError(properties) { - Error.captureStackTrace(this, this.constructor); - Object.assign(this, properties || {}); - this.code = code; - this.message = this.cause ? message + ": " + this.cause.message : message; - } - - // Attach constructor and set default properties - CustomError.prototype = new (baseClass || Error)(); - CustomError.prototype.constructor = CustomError; - CustomError.prototype.name = "Error [" + code + "]"; - return CustomError; -} - -function abortRequest(request) { - for (var event of events) { - request.removeListener(event, eventHandlers[event]); - } - request.on("error", noop); - request.abort(); -} - -function isSubdomain(subdomain, domain) { - assert(isString(subdomain) && isString(domain)); - var dot = subdomain.length - domain.length - 1; - return dot > 0 && subdomain[dot] === "." 
&& subdomain.endsWith(domain); -} - -function isString(value) { - return typeof value === "string" || value instanceof String; -} - -function isFunction(value) { - return typeof value === "function"; -} - -function isBuffer(value) { - return typeof value === "object" && ("length" in value); -} - -// Exports -module.exports = wrap({ http: http, https: https }); -module.exports.wrap = wrap; diff --git a/node_modules/follow-redirects/package.json b/node_modules/follow-redirects/package.json deleted file mode 100644 index 97717c5..0000000 --- a/node_modules/follow-redirects/package.json +++ /dev/null @@ -1,59 +0,0 @@ -{ - "name": "follow-redirects", - "version": "1.15.2", - "description": "HTTP and HTTPS modules that follow redirects.", - "license": "MIT", - "main": "index.js", - "files": [ - "*.js" - ], - "engines": { - "node": ">=4.0" - }, - "scripts": { - "test": "npm run lint && npm run mocha", - "lint": "eslint *.js test", - "mocha": "nyc mocha" - }, - "repository": { - "type": "git", - "url": "git@github.com:follow-redirects/follow-redirects.git" - }, - "homepage": "https://github.com/follow-redirects/follow-redirects", - "bugs": { - "url": "https://github.com/follow-redirects/follow-redirects/issues" - }, - "keywords": [ - "http", - "https", - "url", - "redirect", - "client", - "location", - "utility" - ], - "author": "Ruben Verborgh (https://ruben.verborgh.org/)", - "contributors": [ - "Olivier Lalonde (http://www.syskall.com)", - "James Talmage " - ], - "funding": [ - { - "type": "individual", - "url": "https://github.com/sponsors/RubenVerborgh" - } - ], - "peerDependenciesMeta": { - "debug": { - "optional": true - } - }, - "devDependencies": { - "concat-stream": "^2.0.0", - "eslint": "^5.16.0", - "express": "^4.16.4", - "lolex": "^3.1.0", - "mocha": "^6.0.2", - "nyc": "^14.1.1" - } -} diff --git a/package-lock.json b/package-lock.json index beb5667..bf5a406 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,16 +1,15 @@ { "name": "pacman-tool-repo", - "version": "1.1", + "version": "2.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "pacman-tool-repo", - "version": "1.1", + "version": "2.0", "license": "MIT", "dependencies": { - "@actions/exec": "^1.1.1", - "follow-redirects": "^1.15.2" + "@actions/exec": "^1.1.1" } }, "node_modules/@actions/exec": { @@ -25,25 +24,6 @@ "version": "1.1.3", "resolved": "https://registry.npmjs.org/@actions/io/-/io-1.1.3.tgz", "integrity": "sha512-wi9JjgKLYS7U/z8PPbco+PvTb/nRWjeoFlJ1Qer83k/3C5PHQi28hiVdeE2kHXmIL99mQFawx8qt/JPjZilJ8Q==" - }, - "node_modules/follow-redirects": { - "version": "1.15.2", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.2.tgz", - "integrity": "sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA==", - "funding": [ - { - "type": "individual", - "url": "https://github.com/sponsors/RubenVerborgh" - } - ], - "engines": { - "node": ">=4.0" - }, - "peerDependenciesMeta": { - "debug": { - "optional": true - } - } } } } diff --git a/package.json b/package.json index 6748953..a2f6604 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "name": "pacman-tool-repo", - "description": "tools for working with pacman db of repo", - "version": "1.1", + "version": "2.0", + "description": "Tools for working with pacman db of repo.", "main": "index.js", "scripts": { "test": "echo \"Error: no test specified\" && exit 1" @@ -10,15 +10,19 @@ "type": "git", "url": "git+https://github.com/termux-pacman/pacman-tool-repo.git" }, - 
"keywords": ["repo-add", "repo-remove", "pacman"], - "author": "Termux Pacman", + "keywords": [ + "actions", + "repo-add", + "repo-remove", + "pacman" + ], + "author": "termux-pacman", "license": "MIT", "bugs": { "url": "https://github.com/termux-pacman/pacman-tool-repo/issues" }, "homepage": "https://github.com/termux-pacman/pacman-tool-repo#readme", "dependencies": { - "@actions/exec": "^1.1.1", - "follow-redirects": "^1.15.2" + "@actions/exec": "^1.1.1" } } diff --git a/pacman-tool-repo.tar.xz b/pacman-tool-repo.tar.xz deleted file mode 100644 index eaad32a..0000000 Binary files a/pacman-tool-repo.tar.xz and /dev/null differ diff --git a/pacman-tool-repo/repo-add b/pacman-tool-repo/repo-add deleted file mode 100755 index a1c756b..0000000 --- a/pacman-tool-repo/repo-add +++ /dev/null @@ -1,688 +0,0 @@ -#!/bin/bash -# -# repo-add - add a package to a given repo database file -# repo-remove - remove a package entry from a given repo database file -# -# Copyright (c) 2006-2021 Pacman Development Team -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . - -shopt -s extglob - -# gettext initialization -export TEXTDOMAIN='pacman-scripts' -export TEXTDOMAINDIR='/usr/share/locale' -export UTILDIR="/usr/share/pacman" - -declare -r myver='6.0.2' -declare -r confdir='/etc' - -QUIET=0 -ONLYADDNEW=0 -RMEXISTING=0 -SIGN=0 -KEY=0 -VERIFY=0 -REPO_DB_FILE= -REPO_DB_PREFIX= -REPO_DB_SUFFIX= -LOCKFILE= -CLEAN_LOCK=0 -USE_COLOR='y' -PREVENT_DOWNGRADE=0 - -# Import libmakepkg -source $UTILDIR/compress.sh -source $UTILDIR/message.sh - -# ensure we have a sane umask set -umask 0022 - -# print usage instructions -usage() { - cmd=${0##*/} - printf -- "%s (pacman) %s\n\n" "$cmd" "$myver" - if [[ $cmd == "repo-add" ]] ; then - printf -- "$(gettext "Usage: repo-add [options] ...\n")" - printf -- "\n" - printf -- "$(gettext "\ -repo-add will update a package database by reading a package file.\n\ -Multiple packages to add can be specified on the command line.\n")" - printf -- "\n" - printf -- "$(gettext "Options:\n")" - printf -- "$(gettext " -n, --new only add packages that are not already in the database\n")" - printf -- "$(gettext " -R, --remove remove old package file from disk after updating database\n")" - printf -- "$(gettext " -p, --prevent-downgrade do not add package to database if a newer version is already present\n")" - elif [[ $cmd == "repo-remove" ]] ; then - printf -- "$(gettext "Usage: repo-remove [options] ...\n")" - printf -- "\n" - printf -- "$(gettext "\ -repo-remove will update a package database by removing the package name\n\ -specified on the command line from the given repo database. 
Multiple\n\ -packages to remove can be specified on the command line.\n")" - printf -- "\n" - printf -- "$(gettext "Options:\n")" - else - printf -- "$(gettext "Please move along, there is nothing to see here.\n")" - return - fi - printf -- "$(gettext " --nocolor turn off color in output\n")" - printf -- "$(gettext " -q, --quiet minimize output\n")" - printf -- "$(gettext " -s, --sign sign database with GnuPG after update\n")" - printf -- "$(gettext " -k, --key use the specified key to sign the database\n")" - printf -- "$(gettext " -v, --verify verify database's signature before update\n")" - printf -- "$(gettext "\n\ -See %s(8) for more details and descriptions of the available options.\n")" $cmd - printf "\n" - if [[ $cmd == "repo-add" ]] ; then - printf -- "$(gettext "Example: repo-add /path/to/repo.db.tar.gz pacman-3.0.0-1-i686.pkg.tar.gz\n")" - elif [[ $cmd == "repo-remove" ]] ; then - printf -- "$(gettext "Example: repo-remove /path/to/repo.db.tar.gz kernel26\n")" - fi -} - -version() { - cmd=${0##*/} - printf "%s (pacman) %s\n\n" "$cmd" "$myver" - printf -- "Copyright (c) 2006-2021 Pacman Development Team .\n" - printf '\n' - printf -- "$(gettext "\ -This is free software; see the source for copying conditions.\n\ -There is NO WARRANTY, to the extent permitted by law.\n")" -} - - -# format a metadata entry -# arg1 - Entry name -# ... - value(s) -format_entry() { - local field=$1; shift - - if [[ $1 ]]; then - printf '%%%s%%\n' "$field" - printf '%s\n' "$@" - printf '\n' - fi -} - -find_pkgentry() { - local pkgname=$1 - local pkgentry - - for pkgentry in "$tmpdir/db/$pkgname"*; do - name=${pkgentry##*/} - if [[ ${name%-*-*} = "$pkgname" ]]; then - echo $pkgentry - return 0 - fi - done - return 1 -} - -check_gpg() { - if ! type -p gpg >/dev/null; then - error "$(gettext "Cannot find the gpg binary! Is GnuPG installed?")" - exit 1 # $E_MISSING_PROGRAM - fi - - if (( ! VERIFY )); then - if ! gpg --list-secret-key ${GPGKEY:+"$GPGKEY"} &>/dev/null; then - if [[ ! -z $GPGKEY ]]; then - error "$(gettext "The key ${GPGKEY} does not exist in your keyring.")" - elif (( ! KEY )); then - error "$(gettext "There is no key in your keyring.")" - fi - exit 1 - fi - fi -} - -# sign the package database once repackaged -create_signature() { - (( ! SIGN )) && return - local dbfile=$1 - local ret=0 - msg "$(gettext "Signing database '%s'...")" "${dbfile##*/.tmp.}" - - local SIGNWITHKEY=() - if [[ -n $GPGKEY ]]; then - SIGNWITHKEY=(-u "${GPGKEY}") - fi - gpg --detach-sign --use-agent --no-armor "${SIGNWITHKEY[@]}" "$dbfile" &>/dev/null || ret=$? - - if (( ! ret )); then - msg2 "$(gettext "Created signature file '%s'")" "${dbfile##*/.tmp.}.sig" - else - warning "$(gettext "Failed to sign package database file '%s'")" "${dbfile##*/.tmp.}" - fi -} - -# verify the existing package database signature -verify_signature() { - (( ! VERIFY )) && return - local dbfile=$1 - local ret=0 - msg "$(gettext "Verifying database signature...")" - - if [[ ! -f $dbfile.sig ]]; then - warning "$(gettext "No existing signature found, skipping verification.")" - return - fi - gpg --verify "$dbfile.sig" || ret=$? - if (( ! 
ret )); then - msg2 "$(gettext "Database signature file verified.")" - else - error "$(gettext "Database signature was NOT valid!")" - exit 1 - fi -} - -verify_repo_extension() { - local junk=() - if [[ $1 = *.db.tar* ]] && get_compression_command "$1" junk; then - return 0 - fi - - error "$(gettext "'%s' does not have a valid database archive extension.")" "$1" - exit 1 -} - -# write an entry to the pacman database -# arg1 - path to package -db_write_entry() { - # blank out all variables - local pkgfile=$1 - local -a _groups _licenses _replaces _depends _conflicts _provides \ - _optdepends _makedepends _checkdepends - local pkgname pkgbase pkgver pkgdesc csize size url arch builddate packager \ - md5sum sha256sum pgpsig pgpsigsize - - # read info from the zipped package - local line var val - while read -r line; do - [[ ${line:0:1} = '#' ]] && continue - IFS=' =' read -r var val < <(printf '%s\n' "$line") - - # normalize whitespace with an extglob - declare "$var=${val//+([[:space:]])/ }" - case $var in - group) _groups+=("$group") ;; - license) _licenses+=("$license") ;; - replaces) _replaces+=("$replaces") ;; - depend) _depends+=("$depend") ;; - conflict) _conflicts+=("$conflict") ;; - provides) _provides+=("$provides") ;; - optdepend) _optdepends+=("$optdepend") ;; - makedepend) _makedepends+=("$makedepend") ;; - checkdepend) _checkdepends+=("$checkdepend") ;; - esac - done< <(bsdtar -xOqf "$pkgfile" .PKGINFO) - - # ensure $pkgname and $pkgver variables were found - if [[ -z $pkgname || -z $pkgver ]]; then - error "$(gettext "Invalid package file '%s'.")" "$pkgfile" - return 1 - fi - - if [[ -d $tmpdir/db/$pkgname-$pkgver ]]; then - warning "$(gettext "An entry for '%s' already existed")" "$pkgname-$pkgver" - if (( ONLYADDNEW )); then - return 0 - fi - else - pkgentry=$(find_pkgentry "$pkgname") - if [[ -n $pkgentry ]]; then - - local version=$(sed -n '/^%VERSION%$/ {n;p;q}' "$pkgentry/desc") - if (( $(vercmp "$version" "$pkgver") > 0 )); then - warning "$(gettext "A newer version for '%s' is already present in database")" "$pkgname" - if (( PREVENT_DOWNGRADE )); then - return 0 - fi - fi - if (( RMEXISTING )); then - local oldfilename="$(sed -n '/^%FILENAME%$/ {n;p;q;}' "$pkgentry/desc")" - local oldfile="$(dirname "$1")/$oldfilename" - fi - fi - fi - - # compute base64'd PGP signature - if [[ -f "$pkgfile.sig" ]]; then - if grep -q 'BEGIN PGP SIGNATURE' "$pkgfile.sig"; then - error "$(gettext "Cannot use armored signatures for packages: %s")" "$pkgfile.sig" - return 1 - fi - pgpsigsize=$(wc -c < "$pkgfile.sig") - if (( pgpsigsize > 16384 )); then - error "$(gettext "Invalid package signature file '%s'.")" "$pkgfile.sig" - return 1 - fi - msg2 "$(gettext "Adding package signature...")" - pgpsig=$(base64 "$pkgfile.sig" | tr -d '\n') - fi - - csize=$(wc -c < "$pkgfile") - - # compute checksums - md5sum=$(md5sum "$pkgfile") - md5sum=${md5sum%% *} - sha256sum=$(sha256sum "$pkgfile") - sha256sum=${sha256sum%% *} - - # remove an existing entry if it exists, ignore failures - db_remove_entry "$pkgname" - - # create package directory - pushd "$tmpdir/db" >/dev/null - mkdir "$pkgname-$pkgver" - pushd "$pkgname-$pkgver" >/dev/null - - # create desc entry - { - format_entry "FILENAME" "${1##*/}" - format_entry "NAME" "$pkgname" - format_entry "BASE" "$pkgbase" - format_entry "VERSION" "$pkgver" - format_entry "DESC" "$pkgdesc" - format_entry "GROUPS" "${_groups[@]}" - format_entry "CSIZE" "$csize" - format_entry "ISIZE" "$size" - - # add checksums - format_entry "MD5SUM" "$md5sum" - format_entry 
"SHA256SUM" "$sha256sum" - - # add PGP sig - format_entry "PGPSIG" "$pgpsig" - - format_entry "URL" "$url" - format_entry "LICENSE" "${_licenses[@]}" - format_entry "ARCH" "$arch" - format_entry "BUILDDATE" "$builddate" - format_entry "PACKAGER" "$packager" - format_entry "REPLACES" "${_replaces[@]}" - format_entry "CONFLICTS" "${_conflicts[@]}" - format_entry "PROVIDES" "${_provides[@]}" - - format_entry "DEPENDS" "${_depends[@]}" - format_entry "OPTDEPENDS" "${_optdepends[@]}" - format_entry "MAKEDEPENDS" "${_makedepends[@]}" - format_entry "CHECKDEPENDS" "${_checkdepends[@]}" - } >'desc' - - popd >/dev/null - popd >/dev/null - - # copy updated package entry into "files" database - cp -a "$tmpdir/db/$pkgname-$pkgver" "$tmpdir/files/$pkgname-$pkgver" - - # create files file - local files_path="$tmpdir/files/$pkgname-$pkgver/files" - echo "%FILES%" >"$files_path" - bsdtar --exclude='^.*' -tf "$pkgfile" | LC_ALL=C sort -u >>"$files_path" - - if (( RMEXISTING )); then - msg2 "$(gettext "Removing old package file '%s'")" "$oldfilename" - rm -f ${oldfile} ${oldfile}.sig - fi - - return 0 -} # end db_write_entry - -# remove existing entries from the DB -# arg1 - package name -db_remove_entry() { - local pkgname=$1 - local notfound=1 - local pkgentry=$(find_pkgentry "$pkgname") - while [[ -n $pkgentry ]]; do - notfound=0 - - msg2 "$(gettext "Removing existing entry '%s'...")" \ - "${pkgentry##*/}" - rm -rf "$pkgentry" - - # remove entries in "files" database - local filesentry=$(echo "$pkgentry" | sed 's/\(.*\)\/db\//\1\/files\//') - rm -rf "$filesentry" - - pkgentry=$(find_pkgentry "$pkgname") - done - return $notfound -} # end db_remove_entry - -elephant() { - case $(( RANDOM % 2 )) in - 0) printf '%s\n' "H4sIAL3qBE4CAyWLwQ3AMAgD/0xh5UPzYiFUMgjq7LUJsk7yIQNAQTAikFUDnqkr" \ - "OQFOUm0Wd9pHCi13ONjBpVdqcWx+EdXVX4vXvGv5cgztB9+fJxZ7AAAA" - ;; - - 1) printf '%s\n' "H4sIAJVWBU4CA21RMQ7DIBDbeYWrDgQJ7rZ+IA/IB05l69alcx5fc0ASVXUk4jOO" \ - "7yAAUWtorygwJ4hlMii0YkJKKRKGvsMsiykl1SalvrMD1gUXyXRkGZPx5OPft81K" \ - "tNAiAjyGjYO47h1JjizPkJrCWbK/4C+uLkT7bzpGc7CT9bmOzNSW5WLSO5vexjmH" \ - "ZL9JFFZeAa0a2+lKjL2anpYfV+0Zx9LJ+/MC8nRayuDlSNy2rfAPibOzsiWHL0jL" \ - "SsjFAQAA" - ;; - esac | base64 -d | gzip -d -} - -prepare_repo_db() { - local repodir dbfile - - # ensure the path to the DB exists; $LOCKFILE is always an absolute path - repodir=${LOCKFILE%/*}/ - - if [[ ! -d $repodir ]]; then - error "$(gettext "%s does not exist or is not a directory.")" "$repodir" - exit 1 - fi - - # check lock file - if ( set -o noclobber; echo "$$" > "$LOCKFILE") 2> /dev/null; then - CLEAN_LOCK=1 - else - error "$(gettext "Failed to acquire lockfile: %s.")" "$LOCKFILE" - [[ -f $LOCKFILE ]] && error "$(gettext "Held by process %s")" "$(cat "$LOCKFILE")" - exit 1 - fi - - for repo in "db" "files"; do - dbfile=${repodir}/$REPO_DB_PREFIX.$repo.$REPO_DB_SUFFIX - - if [[ -f $dbfile ]]; then - # there are two situations we can have here: - # a DB with some entries, or a DB with no contents at all. - if ! 
bsdtar -tqf "$dbfile" '*/desc' >/dev/null 2>&1; then - # check empty case - if [[ -n $(bsdtar -tqf "$dbfile" '*' 2>/dev/null) ]]; then - error "$(gettext "Repository file '%s' is not a proper pacman database.")" "$dbfile" - exit 1 - fi - fi - verify_signature "$dbfile" - msg "$(gettext "Extracting %s to a temporary location...")" "${dbfile##*/}" - bsdtar -xf "$dbfile" -C "$tmpdir/$repo" - else - case $cmd in - repo-remove) - # only a missing "db" database is currently an error - # TODO: remove if statement - if [[ $repo == "db" ]]; then - error "$(gettext "Repository file '%s' was not found.")" "$dbfile" - exit 1 - fi - ;; - repo-add) - # check if the file can be created (write permission, directory existence, etc) - if ! touch "$dbfile"; then - error "$(gettext "Repository file '%s' could not be created.")" "$dbfile" - exit 1 - fi - rm -f "$dbfile" - ;; - esac - fi - done -} - -add() { - if [[ ! -f $1 ]]; then - error "$(gettext "File '%s' not found.")" "$1" - return 1 - fi - - pkgfile=$1 - if ! bsdtar -tqf "$pkgfile" .PKGINFO >/dev/null 2>&1; then - error "$(gettext "'%s' is not a package file, skipping")" "$pkgfile" - return 1 - fi - - msg "$(gettext "Adding package '%s'")" "$pkgfile" - - db_write_entry "$pkgfile" -} - -remove() { - pkgname=$1 - msg "$(gettext "Searching for package '%s'...")" "$pkgname" - - if ! db_remove_entry "$pkgname"; then - error "$(gettext "Package matching '%s' not found.")" "$pkgname" - return 1 - fi - - return 0 -} - -rotate_db() { - dirname=${LOCKFILE%/*} - - pushd "$dirname" >/dev/null - - for repo in "db" "files"; do - filename=${REPO_DB_PREFIX}.${repo}.${REPO_DB_SUFFIX} - tempname=$dirname/.tmp.$filename - - # hardlink or move the previous version of the database and signature to .old - # extension as a backup measure - if [[ -f $filename ]]; then - ln -f "$filename" "$filename.old" 2>/dev/null || \ - mv -f "$filename" "$filename.old" - - if [[ -f $filename.sig ]]; then - ln -f "$filename.sig" "$filename.old.sig" 2>/dev/null || \ - mv -f "$filename.sig" "$filename.old.sig" - else - rm -f "$filename.old.sig" - fi - fi - - # rotate the newly-created database and signature into place - mv "$tempname" "$filename" - if [[ -f $tempname.sig ]]; then - mv "$tempname.sig" "$filename.sig" - fi - - dblink=${filename%.tar*} - rm -f "$dblink" "$dblink.sig" - ln -s "$filename" "$dblink" 2>/dev/null || \ - ln "$filename" "$dblink" 2>/dev/null || \ - cp "$filename" "$dblink" - if [[ -f "$filename.sig" ]]; then - ln -s "$filename.sig" "$dblink.sig" 2>/dev/null || \ - ln "$filename.sig" "$dblink.sig" 2>/dev/null || \ - cp "$filename.sig" "$dblink.sig" - fi - done - - popd >/dev/null -} - -create_db() { - # $LOCKFILE is already guaranteed to be absolute so this is safe - dirname=${LOCKFILE%/*} - - for repo in "db" "files"; do - filename=${REPO_DB_PREFIX}.${repo}.${REPO_DB_SUFFIX} - # this ensures we create it on the same filesystem, making moves atomic - tempname=$dirname/.tmp.$filename - - pushd "$tmpdir/$repo" >/dev/null - local files=(*) - if [[ ${files[*]} = '*' ]]; then - # we have no packages remaining? 
zip up some emptyness - warning "$(gettext "No packages remain, creating empty database.")" - files=(-T /dev/null) - fi - bsdtar -cf - "${files[@]}" | compress_as "$filename" > "$tempname" - popd >/dev/null - - create_signature "$tempname" - done -} - -trap_exit() { - # unhook all traps to avoid race conditions - trap '' EXIT TERM HUP QUIT INT ERR - - echo - error "$@" - clean_up 1 -} - -clean_up() { - local exit_code=${1:-$?} - - # unhook all traps to avoid race conditions - trap '' EXIT TERM HUP QUIT INT ERR - - [[ -d $tmpdir ]] && rm -rf "$tmpdir" - (( CLEAN_LOCK )) && [[ -f $LOCKFILE ]] && rm -f "$LOCKFILE" - - exit $exit_code -} - - -# PROGRAM START - -# determine whether we have gettext; make it a no-op if we do not -if ! type gettext &>/dev/null; then - gettext() { - echo "$@" - } -fi - -case $1 in - -h|--help) usage; exit 0;; - -V|--version) version; exit 0;; -esac - -# figure out what program we are -cmd=${0##*/} -if [[ $cmd == "repo-elephant" ]]; then - elephant - exit 0 -fi - -if [[ $cmd != "repo-add" && $cmd != "repo-remove" ]]; then - error "$(gettext "Invalid command name '%s' specified.")" "$cmd" - exit 1 -fi - -tmpdir=$(mktemp -d "${TMPDIR:-/tmp}/repo-tools.XXXXXXXXXX") || (\ - error "$(gettext "Cannot create temp directory for database building.")"; \ - exit 1) - -for repo in "db" "files"; do - mkdir "$tmpdir/$repo" -done - -trap 'clean_up' EXIT -for signal in TERM HUP QUIT; do - trap "trap_exit \"$(gettext "%s signal caught. Exiting...")\" \"$signal\"" "$signal" -done -trap 'trap_exit "$(gettext "Aborted by user! Exiting...")"' INT -trap 'trap_exit "$(gettext "An unknown error has occurred. Exiting...")"' ERR - -declare -a args -# parse arguments -while (( $# )); do - case $1 in - -q|--quiet) QUIET=1;; - -n|--new) ONLYADDNEW=1;; - -R|--remove) RMEXISTING=1;; - --nocolor) USE_COLOR='n';; - -s|--sign) - SIGN=1 - ;; - -k|--key) - KEY=1 - shift - GPGKEY=$1 - ;; - -v|--verify) - VERIFY=1 - ;; - -p|--prevent-downgrade) - PREVENT_DOWNGRADE=1 - ;; - *) - args+=("$1") - ;; - esac - shift -done - -# check if messages are to be printed using color -if [[ -t 2 && $USE_COLOR != "n" ]]; then - colorize -else - unset ALL_OFF BOLD BLUE GREEN RED YELLOW -fi - -REPO_DB_FILE=${args[0]} -if [[ -z $REPO_DB_FILE ]]; then - usage - exit 1 -fi - -if [[ $REPO_DB_FILE == /* ]]; then - LOCKFILE=$REPO_DB_FILE.lck -else - LOCKFILE=$PWD/$REPO_DB_FILE.lck -fi - -verify_repo_extension "$REPO_DB_FILE" - -REPO_DB_PREFIX=${REPO_DB_FILE##*/} -REPO_DB_PREFIX=${REPO_DB_PREFIX%.db.*} -REPO_DB_SUFFIX=${REPO_DB_FILE##*.db.} - -if (( SIGN || VERIFY )); then - check_gpg -fi - -if (( VERIFY && ${#args[@]} == 1 )); then - for repo in "db" "files"; do - dbfile=${repodir}/$REPO_DB_PREFIX.$repo.$REPO_DB_SUFFIX - - if [[ -f $dbfile ]]; then - verify_signature "$dbfile" - fi - done - exit 0 -fi - -prepare_repo_db - -fail=0 -for arg in "${args[@]:1}"; do - case $cmd in - repo-add) add "$arg" ;; - repo-remove) remove "$arg" ;; - esac || fail=1 -done - -# if the whole operation was a success, re-zip and rotate databases -if (( !fail )); then - msg "$(gettext "Creating updated database file '%s'")" "$REPO_DB_FILE" - create_db - rotate_db -else - msg "$(gettext "No packages modified, nothing to do.")" - exit 1 -fi - -exit 0 diff --git a/pacman-tool-repo/setup.sh b/pacman-tool-repo/setup.sh deleted file mode 100755 index 9b1e356..0000000 --- a/pacman-tool-repo/setup.sh +++ /dev/null @@ -1,28 +0,0 @@ -#!/bin/bash - -# Installing dependencies for the repo-add and repo-remove commands -if ! 
$(dpkg -l libarchive-tools 2>/dev/null); then - apt-get update - while true; do - { - apt-get install libarchive-tools -y - break - } || { - apt-get update - } - done -fi - -# Commands settings -if [ ! -d /usr/share/pacman ]; then - mkdir -p /usr/share/pacman - cp ./util/* /usr/share/pacman - chmod 777 /usr/share/pacman/* -fi -if [ ! -f /bin/repo-add ]; then - cp ./repo-add /bin/repo-add - chmod 777 /bin/repo-add -fi -if [ ! -f /bin/repo-remove ]; then - ln -s /bin/repo-add /bin/repo-remove -fi diff --git a/pacman-tool-repo/util/compress.sh b/pacman-tool-repo/util/compress.sh deleted file mode 100644 index 7b2ae57..0000000 --- a/pacman-tool-repo/util/compress.sh +++ /dev/null @@ -1,80 +0,0 @@ -#!/bin/bash -# -# compress.sh - functions to compress archives in a uniform manner -# -# Copyright (c) 2017-2021 Pacman Development Team -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . -# - -[[ -n "$LIBMAKEPKG_UTIL_COMPRESS_SH" ]] && return -LIBMAKEPKG_UTIL_COMPRESS_SH=1 - -export UTILDIR="/usr/share/pacman" - -source $UTILDIR/message.sh -source $UTILDIR/pkgbuild.sh - - -# Wrapper around many stream compression formats, for use in the middle of a -# pipeline. A tar archive is passed on stdin and compressed to stdout. -compress_as() { - # $1: final archive filename extension for compression type detection - - local cmd ext=${1#${1%.tar*}} - - if ! get_compression_command "$ext" cmd; then - warning "$(gettext "'%s' is not a valid archive extension.")" "${ext:-${1##*/}}" - cat - else - "${cmd[@]}" - fi -} - -# Retrieve the compression command for an archive extension, or cat for .tar, -# and save it to an existing array name. If the extension cannot be found, -# clear the array and return failure. 
-get_compression_command() { - local extarray ext=$1 outputvar=$2 - local resolvecmd=() fallback=() - - case "$ext" in - *.tar.gz) fallback=(gzip -c -f -n) ;; - *.tar.bz2) fallback=(bzip2 -c -f) ;; - *.tar.xz) fallback=(xz -c -z -) ;; - *.tar.zst) fallback=(zstd -c -z -q -) ;; - *.tar.lrz) fallback=(lrzip -q) ;; - *.tar.lzo) fallback=(lzop -q) ;; - *.tar.Z) fallback=(compress -c -f) ;; - *.tar.lz4) fallback=(lz4 -q) ;; - *.tar.lz) fallback=(lzip -c -f) ;; - *.tar) fallback=(cat) ;; - # do not respect unknown COMPRESS* env vars - *) array_build "$outputvar" resolvecmd; return 1 ;; - esac - - ext=${ext#*.tar.} - # empty the variable for plain tar archives so we fallback to cat - ext=${ext#*.tar} - - if [[ -n $ext ]]; then - extarray="COMPRESS${ext^^}[@]" - resolvecmd=("${!extarray}") - fi - if (( ${#resolvecmd[@]} == 0 )); then - resolvecmd=("${fallback[@]}") - fi - - array_build "$outputvar" resolvecmd -} diff --git a/pacman-tool-repo/util/error.sh b/pacman-tool-repo/util/error.sh deleted file mode 100644 index 4e1fb03..0000000 --- a/pacman-tool-repo/util/error.sh +++ /dev/null @@ -1,41 +0,0 @@ -#!/bin/bash -# -# error.sh.in - error variable definitions for makepkg -# -# Copyright (c) 2006-2021 Pacman Development Team -# Copyright (c) 2002-2006 by Judd Vinet -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . -# - -[[ -n "$LIBMAKEPKG_UTIL_ERROR_SH" ]] && return -LIBMAKEPKG_UTIL_ERROR_SH=1 - -E_OK=0 -E_FAIL=1 # Generic error -E_CONFIG_ERROR=2 -E_INVALID_OPTION=3 -E_USER_FUNCTION_FAILED=4 -E_PACKAGE_FAILED=5 -E_MISSING_FILE=6 -E_MISSING_PKGDIR=7 -E_INSTALL_DEPS_FAILED=8 -E_REMOVE_DEPS_FAILED=9 -E_ROOT=10 -E_FS_PERMISSIONS=11 -E_PKGBUILD_ERROR=12 -E_ALREADY_BUILT=13 -E_INSTALL_FAILED=14 -E_MISSING_MAKEPKG_DEPS=15 -E_PRETTY_BAD_PRIVACY=16 diff --git a/pacman-tool-repo/util/message.sh b/pacman-tool-repo/util/message.sh deleted file mode 100644 index 3df21fc..0000000 --- a/pacman-tool-repo/util/message.sh +++ /dev/null @@ -1,85 +0,0 @@ -#!/bin/bash -# -# message.sh - functions for outputting messages in makepkg -# -# Copyright (c) 2006-2021 Pacman Development Team -# Copyright (c) 2002-2006 by Judd Vinet -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . 
-#
-
-[[ -n "$LIBMAKEPKG_UTIL_MESSAGE_SH" ]] && return
-LIBMAKEPKG_UTIL_MESSAGE_SH=1
-
-
-colorize() {
-    # prefer terminal safe colored and bold text when tput is supported
-    if tput setaf 0 &>/dev/null; then
-        ALL_OFF="$(tput sgr0)"
-        BOLD="$(tput bold)"
-        BLUE="${BOLD}$(tput setaf 4)"
-        GREEN="${BOLD}$(tput setaf 2)"
-        RED="${BOLD}$(tput setaf 1)"
-        YELLOW="${BOLD}$(tput setaf 3)"
-    else
-        ALL_OFF="\e[0m"
-        BOLD="\e[1m"
-        BLUE="${BOLD}\e[34m"
-        GREEN="${BOLD}\e[32m"
-        RED="${BOLD}\e[31m"
-        YELLOW="${BOLD}\e[33m"
-    fi
-    readonly ALL_OFF BOLD BLUE GREEN RED YELLOW
-}
-
-# plainerr/plainerr are primarily used to continue a previous message on a new
-# line, depending on whether the first line is a regular message or an error
-# output
-
-plain() {
-    (( QUIET )) && return
-    local mesg=$1; shift
-    printf "${BOLD} ${mesg}${ALL_OFF}\n" "$@"
-}
-
-plainerr() {
-    plain "$@" >&2
-}
-
-msg() {
-    (( QUIET )) && return
-    local mesg=$1; shift
-    printf "${GREEN}==>${ALL_OFF}${BOLD} ${mesg}${ALL_OFF}\n" "$@"
-}
-
-msg2() {
-    (( QUIET )) && return
-    local mesg=$1; shift
-    printf "${BLUE} ->${ALL_OFF}${BOLD} ${mesg}${ALL_OFF}\n" "$@"
-}
-
-ask() {
-    local mesg=$1; shift
-    printf "${BLUE}::${ALL_OFF}${BOLD} ${mesg}${ALL_OFF}" "$@"
-}
-
-warning() {
-    local mesg=$1; shift
-    printf "${YELLOW}==> $(gettext "WARNING:")${ALL_OFF}${BOLD} ${mesg}${ALL_OFF}\n" "$@" >&2
-}
-
-error() {
-    local mesg=$1; shift
-    printf "${RED}==> $(gettext "ERROR:")${ALL_OFF}${BOLD} ${mesg}${ALL_OFF}\n" "$@" >&2
-}
diff --git a/pacman-tool-repo/util/pkgbuild.sh b/pacman-tool-repo/util/pkgbuild.sh
deleted file mode 100644
index c317d83..0000000
--- a/pacman-tool-repo/util/pkgbuild.sh
+++ /dev/null
@@ -1,268 +0,0 @@
-#!/bin/bash
-#
-# pkgbuild.sh - functions to extract information from PKGBUILD files
-#
-# Copyright (c) 2009-2021 Pacman Development Team
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program. If not, see <https://www.gnu.org/licenses/>.
-#
-
-[[ -n "$LIBMAKEPKG_UTIL_PKGBUILD_SH" ]] && return
-LIBMAKEPKG_UTIL_PKGBUILD_SH=1
-
-export UTILDIR="/usr/share/pacman"
-
-source $UTILDIR/schema.sh
-
-
-have_function() {
-    declare -f "$1" >/dev/null
-}
-
-grep_function() {
-    { declare -f "$1" || declare -f package; } 2>/dev/null | grep -E "$2"
-}
-
-array_build() {
-    local dest=$1 src=$2 i keys values
-
-    # it's an error to try to copy a value which doesn't exist.
-    declare -p "$2" &>/dev/null || return 1
-
-    # Build an array of the indices of the source array.
-    eval "keys=(\"\${!$2[@]}\")"
-
-    # Clear the destination array
-    eval "$dest=()"
-
-    # Read values indirectly via their index. This approach gives us support
-    # for associative arrays, sparse arrays, and empty strings as elements.
-    for i in "${keys[@]}"; do
-        values+=("printf -v '$dest[$i]' %s \"\${$src[$i]}\";")
-    done
-
-    eval "${values[*]}"
-}
-
-extract_global_variable() {
-    # $1: variable name
-    # $2: multivalued
-    # $3: name of output var
-
-    local attr=$1 isarray=$2 outputvar=$3 ref
-
-    if (( isarray )); then
-        array_build ref "$attr"
-        (( ${#ref[@]} )) && array_build "$outputvar" "$attr"
-    else
-        [[ ${!attr} ]] && printf -v "$outputvar" %s "${!attr}"
-    fi
-}
-
-extract_function_variable() {
-    # $1: function name
-    # $2: variable name
-    # $3: multivalued
-    # $4: name of output var
-
-    local funcname=$1 attr=$2 isarray=$3 outputvar=$4 attr_regex= decl= r=1
-
-    if (( isarray )); then
-        printf -v attr_regex '^[[:space:]]* %s\+?=\(' "$2"
-    else
-        printf -v attr_regex '^[[:space:]]* %s\+?=[^(]' "$2"
-    fi
-
-    # this function requires extglob - save current status to restore later
-    local shellopts=$(shopt -p extglob)
-    shopt -s extglob
-
-    while read -r; do
-        # strip leading whitespace and any usage of declare
-        decl=${REPLY##*([[:space:]])}
-        eval "${decl/#$attr/$outputvar}"
-
-        # entering this loop at all means we found a match, so notify the caller.
-        r=0
-    done < <(grep_function "$funcname" "$attr_regex")
-
-    eval "$shellopts"
-
-    return $r
-}
-
-exists_function_variable() {
-    # $1: function name
-    # $2: variable name
-
-    local funcname=$1 attr=$2 out
-    extract_function_variable "$funcname" "$attr" 0 out ||
-        extract_function_variable "$funcname" "$attr" 1 out
-}
-
-get_pkgbuild_attribute() {
-    # $1: package name
-    # $2: attribute name
-    # $3: multivalued
-    # $4: name of output var
-
-    local pkgname=$1 attrname=$2 isarray=$3 outputvar=$4
-
-    if (( isarray )); then
-        eval "$outputvar=()"
-    else
-        printf -v "$outputvar" %s ''
-    fi
-
-    if [[ $pkgname ]]; then
-        extract_global_variable "$attrname" "$isarray" "$outputvar"
-        extract_function_variable "package_$pkgname" "$attrname" "$isarray" "$outputvar"
-    else
-        extract_global_variable "$attrname" "$isarray" "$outputvar"
-    fi
-}
-
-get_pkgbuild_all_split_attributes() {
-    local attrname=$1 outputvar=$2 all_list list
-
-    if extract_global_variable "$attrname" 1 list; then
-        all_list+=("${list[@]}")
-    fi
-    for a in "${arch[@]}"; do
-        if extract_global_variable "${attrname}_$a" 1 list; then
-            all_list+=("${list[@]}")
-        fi
-    done
-
-    for name in "${pkgname[@]}"; do
-        if extract_function_variable "package_$name" "$attrname" 1 list; then
-            all_list+=("${list[@]}")
-        fi
-
-        for a in "${arch[@]}"; do
-            if extract_function_variable "package_$name" "${attrname}_$a" 1 list; then
-                all_list+=("${list[@]}")
-            fi
-        done
-    done
-
-    (( ${#all_list[@]} )) && array_build "$outputvar" all_list
-}
-
-##
-# usage : get_full_version()
-# return : full version spec, including epoch (if necessary), pkgver, pkgrel
-##
-get_full_version() {
-    if (( epoch > 0 )); then
-        printf "%s\n" "$epoch:$pkgver-$pkgrel"
-    else
-        printf "%s\n" "$pkgver-$pkgrel"
-    fi
-}
-
-##
-# usage : get_pkg_arch( [$pkgname] )
-# return : architecture of the package
-##
-get_pkg_arch() {
-    if [[ -z $1 ]]; then
-        if [[ $arch = "any" ]]; then
-            printf "%s\n" "any"
-        else
-            printf "%s\n" "$CARCH"
-        fi
-    else
-        local arch_override
-        get_pkgbuild_attribute "$1" arch 1 arch_override
-        (( ${#arch_override[@]} == 0 )) && arch_override=("${arch[@]}")
-        if [[ $arch_override = "any" ]]; then
-            printf "%s\n" "any"
-        else
-            printf "%s\n" "$CARCH"
-        fi
-    fi
-}
-
-print_all_package_names() {
-    local version=$(get_full_version)
-    local architecture pkg opts a
-    for pkg in ${pkgname[@]}; do
-        architecture=$(get_pkg_arch $pkg)
-        printf "%s/%s-%s-%s%s\n" "$PKGDEST" "$pkg" "$version" "$architecture" "$PKGEXT"
-    done
-    if check_option "debug" "y" && check_option "strip" "y"; then
-        architecture=$(get_pkg_arch)
-        printf "%s/%s-%s-%s-%s%s\n" "$PKGDEST" "$pkgbase" "debug" "$version" "$architecture" "$PKGEXT"
-    fi
-}
-
-get_all_sources() {
-    local aggregate l a
-
-    if array_build l 'source'; then
-        aggregate+=("${l[@]}")
-    fi
-
-    for a in "${arch[@]}"; do
-        if array_build l "source_$a"; then
-            aggregate+=("${l[@]}")
-        fi
-    done
-
-    array_build "$1" "aggregate"
-}
-
-get_all_sources_for_arch() {
-    local aggregate l
-
-    if array_build l 'source'; then
-        aggregate+=("${l[@]}")
-    fi
-
-    if array_build l "source_$CARCH"; then
-        aggregate+=("${l[@]}")
-    fi
-
-    array_build "$1" "aggregate"
-}
-
-get_integlist() {
-    local integ
-    local integlist=()
-
-    for integ in "${known_hash_algos[@]}"; do
-        # check for e.g. "sha256sums"
-        local sumname="${integ}sums[@]"
-        if [[ -n ${!sumname} ]]; then
-            integlist+=("$integ")
-            continue
-        fi
-
-        # check for e.g. "sha256sums_x86_64"
-        for a in "${arch[@]}"; do
-            local sumname="${integ}sums_${a}[@]"
-            if [[ -n ${!sumname} ]]; then
-                integlist+=("$integ")
-                break
-            fi
-        done
-    done
-
-    if (( ${#integlist[@]} > 0 )); then
-        printf "%s\n" "${integlist[@]}"
-    else
-        printf "%s\n" "${INTEGRITY_CHECK[@]}"
-    fi
-}
diff --git a/pacman-tool-repo/util/schema.sh b/pacman-tool-repo/util/schema.sh
deleted file mode 100644
index ae58aba..0000000
--- a/pacman-tool-repo/util/schema.sh
+++ /dev/null
@@ -1,49 +0,0 @@
-#!/bin/bash
-#
-# schema.sh - declare specific groups of pkgbuild variables
-#
-# Copyright (c) 2015-2021 Pacman Development Team
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program. If not, see <https://www.gnu.org/licenses/>.
-#
-
-[[ -n "$LIBMAKEPKG_SCHEMA_SH" ]] && return
-LIBMAKEPKG_SCHEMA_SH=1
-
-export UTILDIR="/usr/share/pacman"
-
-source $UTILDIR/util.sh
-
-
-known_hash_algos=({ck,md5,sha{1,224,256,384,512},b2})
-
-pkgbuild_schema_arrays=(arch backup checkdepends conflicts depends groups
-                        license makedepends noextract optdepends options
-                        provides replaces source validpgpkeys
-                        "${known_hash_algos[@]/%/sums}")
-
-pkgbuild_schema_strings=(changelog epoch install pkgbase pkgdesc pkgrel pkgver
-                         url)
-
-pkgbuild_schema_arch_arrays=(checkdepends conflicts depends makedepends
-                             optdepends provides replaces source
-                             "${known_hash_algos[@]/%/sums}")
-
-pkgbuild_schema_package_overrides=(pkgdesc arch url license groups depends
-                                   optdepends provides conflicts replaces
-                                   backup options install changelog)
-
-readonly -a known_hash_algos pkgbuild_schema_arrays \
-            pkgbuild_schema_strings pkgbuild_schema_arch_arrays \
-            pkgbuild_schema_package_overrides
diff --git a/pacman-tool-repo/util/util.sh b/pacman-tool-repo/util/util.sh
deleted file mode 100644
index 7840afb..0000000
--- a/pacman-tool-repo/util/util.sh
+++ /dev/null
@@ -1,114 +0,0 @@
-#!/bin/bash
-#
-# util.sh - general utility functions
-#
-# Copyright (c) 2006-2021 Pacman Development Team
-# Copyright (c) 2002-2006 by Judd Vinet
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program. If not, see <https://www.gnu.org/licenses/>.
-#
-
-[[ -n "$LIBMAKEPKG_UTIL_UTIL_SH" ]] && return
-LIBMAKEPKG_UTIL_UTIL_SH=1
-
-export UTILDIR="/usr/share/pacman"
-
-source $UTILDIR/error.sh
-source $UTILDIR/message.sh
-
-##
-# usage : in_array( $needle, $haystack )
-# return : 0 - found
-#          1 - not found
-##
-in_array() {
-    local needle=$1; shift
-    local item
-    for item in "$@"; do
-        [[ $item = "$needle" ]] && return 0 # Found
-    done
-    return 1 # Not Found
-}
-
-# tests if a variable is an array
-is_array() {
-    local v=$1
-    local ret=1
-
-    if [[ ${!v@a} = *a* ]]; then
-        ret=0
-    fi
-
-    return $ret
-}
-
-# Canonicalize a directory path if it exists
-canonicalize_path() {
-    local path="$1"
-
-    if [[ -d $path ]]; then
-        (
-            cd_safe "$path"
-            pwd -P
-        )
-    else
-        printf "%s\n" "$path"
-    fi
-}
-
-dir_is_empty() {
-    (
-        shopt -s dotglob nullglob
-        files=("$1"/*)
-        (( ${#files} == 0 ))
-    )
-}
-
-cd_safe() {
-    if ! cd "$1"; then
-        error "$(gettext "Failed to change to directory %s")" "$1"
-        plainerr "$(gettext "Aborting...")"
-        exit 1
-    fi
-}
-
-# Try to create directory if one does not yet exist. Fails if the directory
-# exists but has no write permissions, or if there is an existing file with
-# the same name.
-ensure_writable_dir() {
-    local dirtype="$1" dirpath="$2"
-
-    if ! mkdir -p "$dirpath" 2>/dev/null; then
-        error "$(gettext "Failed to create the directory \$%s (%s).")" "$dirtype" "$dirpath"
-        return 1
-    elif [[ ! -w $dirpath ]]; then
-        error "$(gettext "You do not have write permission for the directory \$%s (%s).")" "$dirtype" "$dirpath"
-        return 1
-    fi
-
-    return 0
-}
-
-# source a file and fail if it does not succeed
-source_safe() {
-    local shellopts=$(shopt -p extglob)
-    shopt -u extglob
-
-    if ! source "$@"; then
-        error "$(gettext "Failed to source %s")" "$1"
-        exit $E_MISSING_FILE
-    fi
-
-    eval "$shellopts"
-}
diff --git a/test.sh b/test.sh
deleted file mode 100755
index e5bca2b..0000000
--- a/test.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/bash
-
-repo-add --help