Compare commits


2 Commits

Author | SHA1 | Message | Date
sbplat | cd07f789ce | refactor: normalize remaining line endings | 2024-02-11 10:56:39 -05:00
sbplat | d725cf9a01 | refactor: normalize line endings | 2024-02-10 13:09:09 -05:00
1246 changed files with 121417 additions and 304724 deletions


@@ -1,5 +1,2 @@
# Formatting
5f771b785130154ed47952635b7acef371ffe0ec
# Normalize files
55d4fda01b2f39f5b7d7b4fda5214bd7ff0fd5dd
5f771b785130154ed47952635b7acef371ffe0ec

.gitattributes (vendored, 2 changed lines)

@@ -3,8 +3,6 @@
# Ignore all JavaScript files in a directory
src/main/resources/static/pdfjs/* linguist-vendored
src/main/resources/static/pdfjs/** linguist-vendored
src/main/resources/static/pdfjs-legacy/* linguist-vendored
src/main/resources/static/pdfjs-legacy/** linguist-vendored
src/main/resources/static/css/bootstrap-icons.css linguist-vendored
src/main/resources/static/css/bootstrap.min.css linguist-vendored
src/main/resources/static/css/fonts/* linguist-vendored

.github/FUNDING.yml (vendored, new file, 13 changed lines)

@@ -0,0 +1,13 @@
# These are supported funding model platforms
github: Frooodle # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]
patreon: # Replace with a single Patreon username
open_collective: # Replace with a single Open Collective username
ko_fi: # Replace with a single Ko-fi username
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
liberapay: # Replace with a single Liberapay username
issuehunt: # Replace with a single IssueHunt username
otechie: # Replace with a single Otechie username
lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
custom: ['https://paypal.me/froodleplex?country.x=GB&locale.x=en_GB'] # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']


@@ -1,128 +0,0 @@
name: Bug Report
description: File a bug report.
title: "[Bug]: "
body:
- type: markdown
attributes:
value: |
## Bug Report
Thanks for taking the time to fill out this bug report!
This issue form is for reporting bugs only. Please fill out the following sections to help us understand the issue you are facing.
- type: dropdown
id: installation-method
attributes:
label: Installation Method
description: |
Indicate whether you are using Docker or a local installation.
options:
- Docker
- Docker ultra lite
- Docker fat
- Local Installation
- type: textarea
id: problem
validations:
required: true
attributes:
label: The Problem
description: |
Describe the issue you are experiencing here. Tell us what you were trying to do and what happened.
Provide a clear and concise description of what the problem is.
placeholder: Provide a detailed description of the issue.
- type: markdown
attributes:
value: |
## Environment
- type: input
id: version
validations:
required: true
attributes:
label: Version of Stirling-PDF
placeholder: e.g., 0.0.2
description: What version of Stirling-PDF has the issue?
- type: input
id: last-working-version
attributes:
label: Last Working Version of Stirling-PDF
placeholder: e.g., 0.0.1
description: |
If known, please provide the last version where the issue did not occur. Otherwise, leave blank.
- type: input
id: url
attributes:
label: Page Where the Problem Occurred
placeholder: e.g., http://localhost:8080/pdf/pipeline
description: |
If applicable, provide the URL where the issue occurred. Otherwise, leave blank.
- type: textarea
id: docker
attributes:
label: Docker Configuration
description: |
Enter your Docker configuration here if it is relevant to the error. Remove any personal data. Otherwise, leave the field blank.
render: txt
- type: markdown
attributes:
value: |
## Logs
- type: textarea
id: logs
attributes:
label: Relevant Log Output
description: |
Provide any log output that might help us diagnose the issue, such as error messages or stack traces.
render: txt
- type: markdown
attributes:
value: |
## Additional Information
- type: textarea
id: additional-info
attributes:
label: Additional Information
description: |
If you have any additional information that might help us understand and resolve the issue, provide it here.
- type: markdown
attributes:
value: |
## Browser Information
- type: dropdown
id: browsers
attributes:
label: Browsers Affected
description: |
If applicable, select the browsers where you are experiencing the issue. Otherwise, leave blank.
multiple: true
options:
- Firefox
- Chrome
- Safari
- Microsoft Edge
- Other
- type: checkboxes
id: terms
attributes:
label: No Duplicate of the Issue
description: |
Please confirm that you have searched for similar issues and none of them match your problem.
options:
- label: I have verified that there are no existing issues raised related to my problem.
required: true


@@ -1,78 +0,0 @@
name: Feature Request
description: Submit a new feature request.
title: "[Feature Request]: "
labels:
- enhancement
body:
- type: markdown
attributes:
value: |
## Feature Request
Thank you for taking the time to suggest a new feature!
This form is for proposing features or enhancements. Please fill out the following sections to help us understand your idea or suggestion.
- type: textarea
id: feature-description
validations:
required: true
attributes:
label: Feature Description
description: |
Describe the feature you would like to see. Tell us what the feature should do and the problem it would solve.
Provide a clear and concise description of what you want to happen.
placeholder: Provide a detailed description of the desired feature.
- type: markdown
attributes:
value: |
## Motivation
- type: textarea
id: motivation
attributes:
label: Why is this feature valuable?
description: |
Explain why this feature is valuable to you or others. How would it improve the tool or process?
Describe any relevant scenarios that would benefit from this feature.
placeholder: Describe why this feature is important.
- type: markdown
attributes:
value: |
## Possible Implementation
- type: textarea
id: implementation
attributes:
label: Suggested Implementation
description: |
If you have ideas about how this feature could be implemented, describe them here.
This section is optional but can be helpful to guide initial discussions.
placeholder: Describe how this feature might be implemented.
- type: markdown
attributes:
value: |
## Additional Information
- type: textarea
id: additional-info
attributes:
label: Additional Information
description: |
If you have any additional information, comments, or resources you think would support or be relevant to your feature request, include them here.
- type: checkboxes
id: search-confirmation
attributes:
label: No Duplicate of the Feature
description: |
Please confirm that you have searched for similar features in our repository and found none that match your request.
options:
- label: I have verified that there are no existing feature requests similar to my request.
required: true


@@ -1,5 +0,0 @@
blank_issues_enabled: true
contact_links:
- name: 💬 Discord Server
url: https://discord.gg/Cn8pWhQRxZ
about: You can join our Discord server for real time discussion and support


@@ -9,8 +9,6 @@ updates:
directory: "/" # Location of package manifests
schedule:
interval: "weekly"
open-pull-requests-limit: 10
rebase-strategy: "auto"
- package-ecosystem: "docker"
directory: "/" # Location of Dockerfile
schedule:


@@ -1,54 +0,0 @@
Translation:
- changed-files:
- any-glob-to-any-file: 'src/main/resources/messages_*_*.properties'
- any-glob-to-any-file: 'scripts/ignore_translation.toml'
- any-glob-to-any-file: 'src/main/resources/templates/fragments/languages.html'
Front End:
- changed-files:
- any-glob-to-any-file: 'src/main/resources/templates/**/*'
- any-glob-to-any-file: 'src/main/resources/static/**/*'
- any-glob-to-any-file: 'src/main/java/stirling/software/SPDF/controller/web/**'
Java:
- changed-files:
- any-glob-to-any-file: 'src/main/java/**/*.java'
Back End:
- changed-files:
- any-glob-to-any-file: 'src/main/java/stirling/software/SPDF/config/security/**/*'
- any-glob-to-any-file: 'src/main/java/stirling/software/SPDF/config/model/provider/**/*'
- any-glob-to-any-file: 'src/main/resources/settings.yml.template'
- any-glob-to-any-file: 'src/main/resources/banner.txt'
Security:
- changed-files:
- any-glob-to-any-file: 'src/main/java/stirling/software/SPDF/config/security/**/*'
- any-glob-to-any-file: 'src/main/java/stirling/software/SPDF/config/model/provider/**/*'
- any-glob-to-any-file: 'src/main/java/stirling/software/SPDF/config/model/AuthenticationType.java'
API:
- changed-files:
- any-glob-to-any-file: 'src/main/java/stirling/software/SPDF/controller/web/MetricsController.java'
- any-glob-to-any-file: 'src/main/java/stirling/software/SPDF/controller/api/**/*'
Documentation:
- changed-files:
- any-glob-to-any-file: '**/*.md'
- any-glob-to-any-file: 'scripts/counter_translation.py'
- any-glob-to-any-file: 'scripts/ignore_translation.toml'
Docker:
- changed-files:
- any-glob-to-any-file: 'Dockerfile'
- any-glob-to-any-file: 'Dockerfile-*'
- any-glob-to-any-file: 'exampleYmlFiles/*.yml'
Test:
- changed-files:
- any-glob-to-any-file: 'cucumber/**/*'
- any-glob-to-any-file: 'src/test**/*'
Github:
- changed-files:
- any-glob-to-any-file: '.github/**/*'

.github/labels.yml (vendored, 93 changed lines)

@@ -1,93 +0,0 @@
# Label names are important: Release Drafter uses them to decide
# where to record changes in the changelog, or whether to skip them.
#
# The repository labels will be automatically configured using this file and
# the GitHub Action https://github.com/marketplace/actions/github-labeler.
- name: "Back End"
color: "20CE6C"
description: "Issues related to back-end development"
from_name: "Back end"
- name: "Bug"
description: "Something isn't working"
color: "EB9CA6"
from_name: "bug"
- name: "dependencies"
description: "Pull requests that update a dependency file"
color: "5AA8FC"
- name: "Docker"
description: "Pull requests that update Docker code"
color: "1FCEFF"
from_name: "docker"
- name: "Documentation"
description: "Improvements or additions to documentation"
color: "35ABFF"
from_name: "documentation"
- name: "Done for next release"
color: "0CDBD1"
- name: "Done"
color: "60F13B"
- name: "duplicate"
description: "This issue or pull request already exists"
color: "CDD1D5"
- name: "enhancement"
description: "New feature or request"
color: "A0EEEE"
- name: "fix needs confirmation"
color: "60A1E7"
description: "Fix needs to be confirmed"
- name: "Front End"
color: "BBD2F1"
description: "Issues related to front-end development"
- name: "github-actions"
description: "Pull requests that update GitHub Actions code"
color: "999999"
from_name: "github_actions"
- name: "good first issue"
description: "Good for newcomers"
color: "C1B8FF"
- name: "help wanted"
description: "Extra attention is needed"
color: "00E6C4"
- name: "invalid"
description: "This doesn't seem right"
color: "E5E566"
- name: "Java"
description: "Pull requests that update Java code"
color: "FF9E1F"
from_name: "java"
- name: "Long-term Enhancement"
color: "BFDEC3"
description: "Enhancements planned for the long term"
- name: "more-info-needed"
color: "00E4F8"
description: "More information is needed"
- name: "needs investigation"
color: "B8C3A7"
description: "Issues that require further investigation"
- name: "Prioritised enhancement"
color: "4BA2EE"
description: "High-priority enhancements"
- name: "question"
description: "Further information is requested"
color: "D97EE5"
- name: "Translation"
color: "9FABF9"
from_name: "translation"
- name: "upstream"
color: "DEDEDE"
- name: "v2"
color: "FFFF00"
- name: "wontfix"
description: "This will not be worked on"
color: "FFFFFF"
- name: "Security"
color: "000000"
description: "Security-related issues or pull requests"
- name: "API"
color: "FFFF00"
description: "API-related issues or pull requests"
- name: "Test"
color: "FF9E1F"
description: "Testing-related issues or pull requests"
- name: "Stale"
color: "000000"


@@ -4,10 +4,15 @@ Please provide a summary of the changes, including relevant motivation and conte
Closes #(issue_number)
## Checklist
## Checklist:
- [ ] I have read the [Contribution Guidelines](https://github.com/Stirling-Tools/Stirling-PDF/blob/main/CONTRIBUTING.md)
- [ ] I have performed a self-review of my own code
- [ ] I have commented my code, particularly in hard-to-understand areas
- [ ] My changes generate no new warnings
- [ ] I have read the section [Add New Translation Tags](https://github.com/Stirling-Tools/Stirling-PDF/blob/main/HowToAddNewLanguage.md#add-new-translation-tags) (for new translation tags only)
## Contributor License Agreement
By submitting this pull request, I acknowledge and agree that my contributions will be included in Stirling-PDF and that they can be relicensed in the future under the MPL 2.0 (Mozilla Public License Version 2.0) license.
(This does not change the general open-source nature of Stirling-PDF, simply moving from one license to another license)

.github/release.yml (vendored, 32 changed lines)

@@ -1,32 +0,0 @@
changelog:
exclude:
labels:
- Documentation
- Test
- Github
categories:
- title: Bug Fixes
labels:
- Bug
- title: Enhancements
labels:
- enhancement
- title: Minor Enhancements
labels:
- Java
- Front End
- title: Docker Updates
labels:
- Docker
- title: Translation Changes
labels:
- Translation
- title: Other Changes
labels:
- "*"


@@ -1,51 +0,0 @@
import sys
def find_duplicate_keys(file_path):
"""
Finds duplicate keys in a properties file and returns their occurrences.
This function reads a properties file, identifies any keys that occur more than
once, and returns a dictionary with these keys and the line numbers of their occurrences.
Parameters:
file_path (str): The path to the properties file to be checked.
Returns:
dict: A dictionary where each key is a duplicated key in the file, and the value is a list
of line numbers where the key occurs.
"""
with open(file_path, "r", encoding="utf-8") as file:
lines = file.readlines()
keys = {}
duplicates = {}
for line_number, line in enumerate(lines, start=1):
line = line.strip()
if line and not line.startswith("#") and "=" in line:
key = line.split("=", 1)[0].strip()
if key in keys:
# If the key already exists, add the current line number
duplicates.setdefault(key, []).append(line_number)
# Also add the first instance of the key if not already done
if keys[key] not in duplicates[key]:
duplicates[key].insert(0, keys[key])
else:
# Store the line number of the first instance of the key
keys[key] = line_number
return duplicates
if __name__ == "__main__":
failed = False
for ar in sys.argv[1:]:
duplicates = find_duplicate_keys(ar)
if duplicates:
for key, lines in duplicates.items():
lines_str = ", ".join(map(str, lines))
print(f"{key} duplicated in {ar} on lines {lines_str}")
failed = True
if failed:
sys.exit(1)
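For readers skimming this diff, here is a standalone sketch (not part of the repository) of the rule check_duplicates.py applies: a key is everything before the first "=", comments and blank lines are ignored, and any key seen more than once is reported with all of its line numbers. The property names and values below are invented for illustration.

```python
# Illustrative sketch of the duplicate-key rule used by check_duplicates.py.
lines = [
    "pdf.title=Example",   # line 1: first occurrence of pdf.title
    "# a comment",         # line 2: ignored
    "",                    # line 3: ignored
    "pdf.title=Beispiel",  # line 4: duplicate of pdf.title
]

seen = {}        # key -> line number of first occurrence
duplicates = {}  # key -> all line numbers where it occurs

for number, line in enumerate(lines, start=1):
    line = line.strip()
    if line and not line.startswith("#") and "=" in line:
        key = line.split("=", 1)[0].strip()
        if key in seen:
            duplicates.setdefault(key, [seen[key]]).append(number)
        else:
            seen[key] = number

print(duplicates)  # {'pdf.title': [1, 4]}
```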


@@ -1,254 +0,0 @@
"""
Author: Ludy87
Description: This script processes .properties files for localization checks. It compares translation files in a branch with
a reference file to ensure consistency. The script performs two main checks:
1. Verifies that the number of lines (including comments and empty lines) in the translation files matches the reference file.
2. Ensures that all keys in the translation files are present in the reference file and vice versa.
The script also provides functionality to update the translation files to match the reference file by adding missing keys and
adjusting the format.
Usage:
python script_name.py --reference-file <path_to_reference_file> --branch <branch_name> [--files <list_of_changed_files>]
"""
import copy
import glob
import os
import argparse
import re
def parse_properties_file(file_path):
"""Parses a .properties file and returns a list of objects (including comments, empty lines, and line numbers)."""
properties_list = []
with open(file_path, "r", encoding="utf-8") as file:
for line_number, line in enumerate(file, start=1):
stripped_line = line.strip()
# Empty lines
if not stripped_line:
properties_list.append(
{"line_number": line_number, "type": "empty", "content": ""}
)
continue
# Comments
if stripped_line.startswith("#"):
properties_list.append(
{
"line_number": line_number,
"type": "comment",
"content": stripped_line,
}
)
continue
# Key-value pairs
match = re.match(r"^([^=]+)=(.*)$", line)
if match:
key, value = match.groups()
properties_list.append(
{
"line_number": line_number,
"type": "entry",
"key": key.strip(),
"value": value.strip(),
}
)
return properties_list
def write_json_file(file_path, updated_properties):
updated_lines = {entry["line_number"]: entry for entry in updated_properties}
# Sort by line numbers and retain comments and empty lines
all_lines = sorted(set(updated_lines.keys()))
original_format = []
for line in all_lines:
if line in updated_lines:
entry = updated_lines[line]
else:
entry = None
ref_entry = updated_lines[line]
if ref_entry["type"] in ["comment", "empty"]:
original_format.append(ref_entry)
elif entry is None:
# Add missing entries from the reference file
original_format.append(ref_entry)
elif entry["type"] == "entry":
# Replace entries with those from the current JSON
original_format.append(entry)
# Write back in the original format
with open(file_path, "w", encoding="utf-8") as file:
for entry in original_format:
if entry["type"] == "comment":
file.write(f"{entry['content']}\n")
elif entry["type"] == "empty":
file.write(f"{entry['content']}\n")
elif entry["type"] == "entry":
file.write(f"{entry['key']}={entry['value']}\n")
def update_missing_keys(reference_file, file_list, branch=""):
reference_properties = parse_properties_file(reference_file)
for file_path in file_list:
basename_current_file = os.path.basename(branch + file_path)
if (
basename_current_file == os.path.basename(reference_file)
or not file_path.endswith(".properties")
or not basename_current_file.startswith("messages_")
):
continue
current_properties = parse_properties_file(branch + file_path)
updated_properties = []
for ref_entry in reference_properties:
ref_entry_copy = copy.deepcopy(ref_entry)
for current_entry in current_properties:
if current_entry["type"] == "entry":
if ref_entry_copy["type"] != "entry":
continue
if ref_entry_copy["key"] == current_entry["key"]:
ref_entry_copy["value"] = current_entry["value"]
updated_properties.append(ref_entry_copy)
write_json_file(branch + file_path, updated_properties)
def check_for_missing_keys(reference_file, file_list, branch):
update_missing_keys(reference_file, file_list, branch + "/")
def read_properties(file_path):
with open(file_path, "r", encoding="utf-8") as file:
return file.read().splitlines()
def check_for_differences(reference_file, file_list, branch):
reference_branch = reference_file.split("/")[0]
basename_reference_file = os.path.basename(reference_file)
report = []
report.append(
f"### 📋 Checking with the file `{basename_reference_file}` from the `{reference_branch}` - Checking the `{branch}`"
)
reference_lines = read_properties(reference_file)
has_differences = False
only_reference_file = True
for file_path in file_list:
basename_current_file = os.path.basename(branch + "/" + file_path)
if (
basename_current_file == basename_reference_file
or not file_path.endswith(".properties")
or not basename_current_file.startswith("messages_")
):
continue
only_reference_file = False
report.append(f"#### 🗂️ **Checking File:** `{basename_current_file}`...")
current_lines = read_properties(branch + "/" + file_path)
reference_line_count = len(reference_lines)
current_line_count = len(current_lines)
if reference_line_count != current_line_count:
report.append("")
report.append("- **Test 1 Status:** ❌ Failed")
has_differences = True
if reference_line_count > current_line_count:
report.append(
f" - **Issue:** Missing lines! Comments, empty lines, or translation strings are missing. Details: {reference_line_count} (reference) vs {current_line_count} (current)."
)
elif reference_line_count < current_line_count:
report.append(
f" - **Issue:** Too many lines! Check your translation files! Details: {reference_line_count} (reference) vs {current_line_count} (current)."
)
# update_missing_keys(reference_file, [file_path], branch + "/")
else:
report.append("- **Test 1 Status:** ✅ Passed")
# Check for missing or extra keys
current_keys = []
reference_keys = []
for line in current_lines:
if not line.startswith("#") and line != "" and "=" in line:
key, _ = line.split("=", 1)
current_keys.append(key)
for line in reference_lines:
if not line.startswith("#") and line != "" and "=" in line:
key, _ = line.split("=", 1)
reference_keys.append(key)
current_keys_set = set(current_keys)
reference_keys_set = set(reference_keys)
missing_keys = current_keys_set.difference(reference_keys_set)
extra_keys = reference_keys_set.difference(current_keys_set)
missing_keys_list = list(missing_keys)
extra_keys_list = list(extra_keys)
if missing_keys_list or extra_keys_list:
has_differences = True
missing_keys_str = "`, `".join(missing_keys_list)
extra_keys_str = "`, `".join(extra_keys_list)
report.append("- **Test 2 Status:** ❌ Failed")
if missing_keys_list:
report.append(
f" - **Issue:** There are keys in ***{basename_current_file}*** `{missing_keys_str}` that are not present in ***{basename_reference_file}***!"
)
if extra_keys_list:
report.append(
f" - **Issue:** There are keys in ***{basename_reference_file}*** `{extra_keys_str}` that are not present in ***{basename_current_file}***!"
)
# update_missing_keys(reference_file, [file_path], branch + "/")
else:
report.append("- **Test 2 Status:** ✅ Passed")
# if has_differences:
# report.append("")
# report.append(f"#### 🚧 ***{basename_current_file}*** will be corrected...")
report.append("")
report.append("---")
report.append("")
# update_file_list = glob.glob(branch + "/src/**/messages_*.properties", recursive=True)
# update_missing_keys(reference_file, update_file_list)
# report.append("---")
# report.append("")
if has_differences:
report.append("## ❌ Overall Check Status: **_Failed_**")
else:
report.append("## ✅ Overall Check Status: **_Success_**")
if not only_reference_file:
print("\n".join(report))
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Find missing keys")
parser.add_argument(
"--reference-file",
required=True,
help="Path to the reference file.",
)
parser.add_argument(
"--branch",
type=str,
required=True,
help="Branch name.",
)
parser.add_argument(
"--files",
nargs="+",
required=False,
help="List of changed files, separated by spaces.",
)
args = parser.parse_args()
file_list = args.files
if file_list is None:
file_list = glob.glob(
os.getcwd() + "/src/**/messages_*.properties", recursive=True
)
update_missing_keys(args.reference_file, file_list)
else:
check_for_differences(args.reference_file, file_list, args.branch)
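The second test in check_language_properties.py reduces to a two-way set difference between the keys of the reference file and the keys of a translation file. Below is a minimal standalone sketch of that comparison; the example entries are invented for illustration.

```python
# Illustrative sketch of the key-set comparison in check_language_properties.py.
def keys_of(lines):
    """Collect property keys, ignoring comments and blank lines."""
    keys = set()
    for line in lines:
        line = line.strip()
        if line and not line.startswith("#") and "=" in line:
            keys.add(line.split("=", 1)[0].strip())
    return keys

reference = ["home.title=Home", "home.desc=Welcome", "# comment"]
translation = ["home.title=Startseite", "extra.key=Oops"]

# Keys present in the translation but missing from the reference, and vice versa.
missing_in_reference = keys_of(translation) - keys_of(reference)    # {'extra.key'}
missing_in_translation = keys_of(reference) - keys_of(translation)  # {'home.desc'}
print(missing_in_reference, missing_in_translation)
```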


@@ -1,85 +0,0 @@
"""check_tabulator.py"""
import argparse
import sys
def check_tabs(file_path):
"""
Checks for tabs in the specified file.
Args:
file_path (str): The path to the file to be checked.
Returns:
bool: True if tabs are found, False otherwise.
"""
with open(file_path, "r", encoding="utf-8") as file:
content = file.read()
if "\t" in content:
print(f"Tab found in {file_path}")
return True
return False
def replace_tabs_with_spaces(file_path, replace_with=" "):
"""
Replaces tabs with a specified number of spaces in the file.
Args:
file_path (str): The path to the file where tabs will be replaced.
replace_with (str): The character(s) to replace tabs with. Defaults to two spaces.
"""
with open(file_path, "r", encoding="utf-8") as file:
content = file.read()
updated_content = content.replace("\t", replace_with)
with open(file_path, "w", encoding="utf-8") as file:
file.write(updated_content)
def main():
"""
Main function to replace tabs with spaces in the provided files.
The replacement character and files to check are taken from command line arguments.
"""
# Create ArgumentParser instance
parser = argparse.ArgumentParser(
description="Replace tabs in files with specified characters."
)
# Define optional argument `--replace_with`
parser.add_argument(
"--replace_with",
default=" ",
help="Character(s) to replace tabs with. Default is two spaces.",
)
# Define argument for file paths
parser.add_argument("files", metavar="FILE", nargs="+", help="Files to process.")
# Parse arguments
args = parser.parse_args()
# Extract replacement characters and files from the parsed arguments
replace_with = args.replace_with
files_checked = args.files
error = False
for file_path in files_checked:
if check_tabs(file_path):
replace_tabs_with_spaces(file_path, replace_with)
error = True
if error:
print("Error: Originally found tabs in HTML files, now replaced.")
sys.exit(1)
sys.exit(0)
if __name__ == "__main__":
main()


@@ -1,67 +0,0 @@
import re
import yaml
# Paths to the files
chart_yaml_path = "chart/stirling-pdf/Chart.yaml"
gradle_path = "build.gradle"
def get_chart_version(path):
"""
Reads the appVersion from Chart.yaml.
Args:
path (str): The file path to the Chart.yaml.
Returns:
str: The appVersion if found, otherwise an empty string.
"""
with open(path, encoding="utf-8") as file:
chart_yaml = yaml.safe_load(file)
return chart_yaml.get("appVersion", "")
def get_gradle_version(path):
"""
Extracts the version from build.gradle.
Args:
path (str): The file path to the build.gradle.
Returns:
str: The version if found, otherwise an empty string.
"""
with open(path, encoding="utf-8") as file:
for line in file:
if "version =" in line:
# Extracts the value after 'version ='
return re.search(r'version\s*=\s*[\'"](.+?)[\'"]', line).group(1)
return ""
def update_chart_version(path, new_version):
"""
Updates the appVersion in Chart.yaml with a new version.
Args:
path (str): The file path to the Chart.yaml.
new_version (str): The new version to update to.
"""
with open(path, encoding="utf-8") as file:
chart_yaml = yaml.safe_load(file)
chart_yaml["appVersion"] = new_version
with open(path, "w", encoding="utf-8") as file:
yaml.safe_dump(chart_yaml, file)
# Main logic
chart_version = get_chart_version(chart_yaml_path)
gradle_version = get_gradle_version(gradle_path)
if chart_version != gradle_version:
print(
f"Versions do not match. Updating Chart.yaml from {chart_version} to {gradle_version}."
)
update_chart_version(chart_yaml_path, gradle_version)
else:
print("Versions match. No update required.")
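The version extraction in gradle_to_chart.py relies on a single regular expression over the `version = '...'` line of build.gradle. A small standalone sketch with an invented version string shows what it matches:

```python
# Illustrative sketch of the regex gradle_to_chart.py uses to read the Gradle version.
import re

line = "version = '0.20.1'"  # example build.gradle line; version string is invented
match = re.search(r'version\s*=\s*[\'"](.+?)[\'"]', line)
print(match.group(1))  # 0.20.1
```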


@@ -1,20 +0,0 @@
name: "Pull Request Labeler"
on:
pull_request_target:
types: [opened, synchronize]
jobs:
labeler:
permissions:
contents: read
pull-requests: write
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Apply Labels
uses: actions/labeler@v5
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
configuration-path: .github/labeler-config.yml
sync-labels: true


@@ -1,10 +1,10 @@
name: Build repo
name: "Build repo"
on:
push:
branches: ["main"]
branches: [ "main" ]
pull_request:
branches: ["main"]
branches: [ "main" ]
jobs:
build:
@@ -17,72 +17,18 @@ jobs:
strategy:
fail-fast: false
matrix:
jdk-version: [17, 21]
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Checkout repository
uses: actions/checkout@v3
- name: Set up JDK ${{ matrix.jdk-version }}
uses: actions/setup-java@v4
with:
java-version: ${{ matrix.jdk-version }}
distribution: "temurin"
- name: Set up JDK 17
uses: actions/setup-java@v3
with:
java-version: '17'
distribution: 'temurin'
- name: Set up Gradle
uses: gradle/actions/setup-gradle@v4
with:
gradle-version: 8.7
- name: Build with Gradle
run: ./gradlew build --no-build-cache
docker-compose-tests:
# if: github.event_name == 'push' && github.ref == 'refs/heads/main' ||
# (github.event_name == 'pull_request' &&
# contains(github.event.pull_request.labels.*.name, 'licenses') == false &&
# (
# contains(github.event.pull_request.labels.*.name, 'Front End') ||
# contains(github.event.pull_request.labels.*.name, 'Java') ||
# contains(github.event.pull_request.labels.*.name, 'Back End') ||
# contains(github.event.pull_request.labels.*.name, 'Security') ||
# contains(github.event.pull_request.labels.*.name, 'API') ||
# contains(github.event.pull_request.labels.*.name, 'Docker') ||
# contains(github.event.pull_request.labels.*.name, 'Test')
# )
# )
runs-on: ubuntu-latest
steps:
- name: Checkout Repository
uses: actions/checkout@v4
- name: Set up Java 17
uses: actions/setup-java@v4
with:
java-version: "17"
distribution: "adopt"
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Install Docker Compose
run: |
sudo curl -SL "https://github.com/docker/compose/releases/download/v2.29.1/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose
sudo chmod +x /usr/local/bin/docker-compose
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3.7"
- name: Pip requirements
run: |
pip install -r ./cucumber/requirements.txt
- name: Run Docker Compose Tests
run: |
chmod +x ./test.sh
./test.sh
- uses: gradle/gradle-build-action@v2.4.2
with:
gradle-version: 7.6
arguments: build --no-build-cache


@@ -1,202 +0,0 @@
name: Check Properties Files
on:
pull_request_target:
types: [opened, synchronize, reopened]
paths:
- "src/main/resources/messages_*.properties"
push:
paths:
- "src/main/resources/messages_en_GB.properties"
permissions:
contents: write
pull-requests: write
jobs:
check-files:
if: github.event_name == 'pull_request_target'
runs-on: ubuntu-latest
steps:
- name: Checkout PR branch
uses: actions/checkout@v4
with:
repository: ${{ github.event.pull_request.head.repo.full_name }}
ref: ${{ github.event.pull_request.head.ref }}
path: pr-branch
fetch-depth: 0
- name: Checkout main branch
uses: actions/checkout@v4
with:
ref: main
path: main-branch
fetch-depth: 0
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3.x"
- name: Install GitHub CLI
run: sudo apt-get update && sudo apt-get install -y gh
- name: Fetch PR changed files
id: fetch-pr-changes
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
echo "Fetching PR changed files..."
cd pr-branch
gh repo set-default ${{ github.repository }}
gh pr view ${{ github.event.pull_request.number }} --json files -q ".files[].path" > ../changed_files.txt
cd ..
echo $(cat changed_files.txt)
BRANCH_PATH="pr-branch"
echo "BRANCH_PATH=${BRANCH_PATH}" >> $GITHUB_ENV
CHANGED_FILES=$(cat changed_files.txt | tr '\n' ' ')
echo "CHANGED_FILES=${CHANGED_FILES}" >> $GITHUB_ENV
echo "Changed files: ${CHANGED_FILES}"
echo "Branch: ${BRANCH_PATH}"
- name: Determine reference file
id: determine-file
run: |
echo "Determining reference file..."
if echo "${{ env.CHANGED_FILES }}" | grep -q 'src/main/resources/messages_en_GB.properties'; then
echo "REFERENCE_FILE=pr-branch/src/main/resources/messages_en_GB.properties" >> $GITHUB_ENV
else
echo "REFERENCE_FILE=main-branch/src/main/resources/messages_en_GB.properties" >> $GITHUB_ENV
fi
echo "REFERENCE_FILE=${{ env.REFERENCE_FILE }}"
- name: Show REFERENCE_FILE
run: echo "Reference file is set to ${{ env.REFERENCE_FILE }}"
- name: Run Python script to check files
id: run-check
run: |
python main-branch/.github/scripts/check_language_properties.py --reference-file ${{ env.REFERENCE_FILE }} --branch ${{ env.BRANCH_PATH }} --files ${{ env.CHANGED_FILES }} > failure.txt || true
- name: Capture output
id: capture-output
run: |
if [ -f failure.txt ] && [ -s failure.txt ]; then
echo "Test failed, capturing output..."
ERROR_OUTPUT=$(cat failure.txt)
echo "ERROR_OUTPUT<<EOF" >> $GITHUB_ENV
echo "$ERROR_OUTPUT" >> $GITHUB_ENV
echo "EOF" >> $GITHUB_ENV
echo $ERROR_OUTPUT
else
echo "No errors found."
echo "ERROR_OUTPUT=" >> $GITHUB_ENV
fi
- name: Post comment on PR
if: env.ERROR_OUTPUT != ''
uses: actions/github-script@v7
with:
script: |
const { GITHUB_REPOSITORY, ERROR_OUTPUT } = process.env;
const [repoOwner, repoName] = GITHUB_REPOSITORY.split('/');
const prNumber = context.issue.number;
// Find existing comment
const comments = await github.rest.issues.listComments({
owner: repoOwner,
repo: repoName,
issue_number: prNumber
});
const comment = comments.data.find(c => c.body.includes("## 🚀 Translation Verification Summary"));
// Only allow the action user to update comments
const expectedActor = "github-actions[bot]";
if (comment && comment.user.login === expectedActor) {
// Update existing comment
await github.rest.issues.updateComment({
owner: repoOwner,
repo: repoName,
comment_id: comment.id,
body: `## 🚀 Translation Verification Summary\n\n\n${ERROR_OUTPUT}\n`
});
console.log("Updated existing comment.");
} else if (!comment) {
// Create new comment if no existing comment is found
await github.rest.issues.createComment({
owner: repoOwner,
repo: repoName,
issue_number: prNumber,
body: `## 🚀 Translation Verification Summary\n\n\n${ERROR_OUTPUT}\n`
});
console.log("Created new comment.");
} else {
console.log("Comment update attempt denied. Actor does not match.");
}
# - name: Set up git config
# run: |
# git config --global user.name "github-actions[bot]"
# git config --global user.email "github-actions[bot]@users.noreply.github.com"
# - name: Add translation keys
# run: |
# cd ${{ env.BRANCH_PATH }}
# git add src/main/resources/messages_*.properties
# git diff --staged --quiet || echo "CHANGES_DETECTED=true" >> $GITHUB_ENV
# git commit -m "Update translation files" || echo "No changes to commit"
# - name: Push
# if: env.CHANGES_DETECTED == 'true'
# run: |
# cd pr-branch
# git remote set-url origin https://x-access-token:${{ secrets.GITHUB_TOKEN }}@github.com/${{ github.event.pull_request.head.repo.full_name }}.git
# git push origin ${{ github.head_ref }} || echo "Push failed: possibly no changes to push"
update-translations-main:
if: github.event_name == 'push'
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3.x"
- name: Run Python script to check files
id: run-check
run: |
python .github/scripts/check_language_properties.py --reference-file src/main/resources/messages_en_GB.properties --branch main
- name: Set up git config
run: |
git config --global user.name "github-actions[bot]"
git config --global user.email "github-actions[bot]@users.noreply.github.com"
- name: Add translation keys
run: |
git add src/main/resources/messages_*.properties
git diff --staged --quiet || echo "CHANGES_DETECTED=true" >> $GITHUB_ENV
- name: Create Pull Request
id: cpr
if: env.CHANGES_DETECTED == 'true'
uses: peter-evans/create-pull-request@v6
with:
token: ${{ secrets.GITHUB_TOKEN }}
commit-message: "Update translation files"
committer: GitHub Action <action@github.com>
author: GitHub Action <action@github.com>
signoff: true
branch: update_translation_files
title: "Update translation files"
body: |
Auto-generated by [create-pull-request][1]
[1]: https://github.com/peter-evans/create-pull-request
labels: Translation
draft: false
delete-branch: true


@@ -5,7 +5,7 @@ on:
branches:
- main
paths:
- "build.gradle"
- 'build.gradle'
permissions:
contents: write
@@ -17,15 +17,13 @@ jobs:
steps:
- name: Check out code
uses: actions/checkout@v4
uses: actions/checkout@v3
- name: Set up JDK 17
uses: actions/setup-java@v4
uses: actions/setup-java@v3
with:
java-version: "17"
distribution: "adopt"
- uses: gradle/actions/setup-gradle@v4
java-version: '17'
distribution: 'adopt'
- name: Run Gradle Command
run: ./gradlew clean generateLicenseReport
@@ -34,46 +32,17 @@ jobs:
run: |
mv build/reports/dependency-license/index.json src/main/resources/static/3rdPartyLicenses.json
- name: Set up git config
run: |
git config --global user.name "github-actions[bot]"
git config --global user.email "github-actions[bot]@users.noreply.github.com"
- name: Run git add
- name: Check for Changes
id: git-check
run: |
git add src/main/resources/static/3rdPartyLicenses.json
git diff --staged --quiet || echo "CHANGES_DETECTED=true" >> $GITHUB_ENV
git diff --staged --exit-code || echo "changes=true" >> $GITHUB_ENV
- name: Create Pull Request
id: cpr
if: env.CHANGES_DETECTED == 'true'
uses: peter-evans/create-pull-request@v6
with:
token: ${{ secrets.GITHUB_TOKEN }}
commit-message: "Update 3rd Party Licenses"
committer: GitHub Action <action@github.com>
author: GitHub Action <action@github.com>
signoff: true
branch: update-3rd-party-licenses
title: "Update 3rd Party Licenses"
body: |
Auto-generated by [create-pull-request][1]
- name: Commit and Push Changes
if: env.changes == 'true'
run: |
git config --global user.name 'Stirling-PDF-Bot'
git config --global user.email 'Stirling-PDF-Bot@stirlingtools.com'
git commit -m "Update 3rd Party Licenses"
git push
[1]: https://github.com/peter-evans/create-pull-request
labels: licenses
draft: false
delete-branch: true
- name: Auto approve
if: steps.cpr.outputs.pull-request-operation == 'created'
run: gh pr review --approve "${{ steps.cpr.outputs.pull-request-number }}"
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Enable auto-merge
if: steps.cpr.outputs.pull-request-operation == 'created'
uses: peter-evans/enable-pull-request-automerge@v3
with:
token: ${{ secrets.GITHUB_TOKEN }}
pull-request-number: ${{ steps.cpr.outputs.pull-request-number }}
merge-method: squash # Choose the merge method: merge, squash, or rebase


@@ -1,24 +0,0 @@
name: Manage labels
on:
schedule:
- cron: "30 20 * * *"
permissions:
contents: read
issues: write
jobs:
labeler:
name: Labeler
runs-on: ubuntu-latest
steps:
- name: Check out the repository
uses: actions/checkout@v4
- name: Run Labeler
uses: crazy-max/ghaction-github-labeler@v5
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
yaml-file: .github/labels.yml
skip-delete: true


@@ -3,10 +3,9 @@ name: Push Docker Image with VersionNumber
on:
workflow_dispatch:
push:
branches:
branches:
- master
- main
permissions:
contents: read
packages: write
@@ -14,126 +13,139 @@ jobs:
push:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up JDK 17
uses: actions/setup-java@v4
with:
java-version: "17"
distribution: "temurin"
- uses: actions/checkout@v3.5.2
- name: Set up JDK 17
uses: actions/setup-java@v3.11.0
with:
java-version: '17'
distribution: 'temurin'
- uses: gradle/actions/setup-gradle@v4
with:
gradle-version: 8.7
- uses: gradle/gradle-build-action@v2.4.2
env:
DOCKER_ENABLE_SECURITY: false
with:
gradle-version: 7.6
arguments: clean build
- name: Run Gradle Command
run: ./gradlew clean build
env:
DOCKER_ENABLE_SECURITY: false
- name: Make Gradle wrapper executable
run: chmod +x gradlew
- name: Get version number
id: versionNumber
run: echo "::set-output name=versionNumber::$(./gradlew printVersion --quiet | tail -1)"
- name: Login to Docker Hub
uses: docker/login-action@v2.1.0
with:
username: ${{ secrets.DOCKER_HUB_USERNAME }}
password: ${{ secrets.DOCKER_HUB_API }}
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v3
- name: Login to GitHub Container Registry
uses: docker/login-action@v2.1.0
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ github.token }}
- name: Get version number
id: versionNumber
run: echo "versionNumber=$(./gradlew printVersion --quiet | tail -1)" >> $GITHUB_OUTPUT
- name: Convert repository owner to lowercase
id: repoowner
run: echo "::set-output name=lowercase::$(echo ${{ github.repository_owner }} | awk '{print tolower($0)}')"
- name: Generate tags
id: meta
uses: docker/metadata-action@v4.4.0
with:
images: |
${{ secrets.DOCKER_HUB_USERNAME }}/s-pdf
ghcr.io/${{ steps.repoowner.outputs.lowercase }}/s-pdf
tags: |
type=raw,value=${{ steps.versionNumber.outputs.versionNumber }},enable=${{ github.ref == 'refs/heads/master' }}
type=raw,value=latest,enable=${{ github.ref == 'refs/heads/master' }}
type=raw,value=alpha,enable=${{ github.ref == 'refs/heads/main' }}
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_HUB_USERNAME }}
password: ${{ secrets.DOCKER_HUB_API }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v2.1.0
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ github.token }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2.5.0
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Build and push main Dockerfile
uses: docker/build-push-action@v4.0.0
with:
context: .
dockerfile: ./Dockerfile
push: true
cache-from: type=gha
cache-to: type=gha,mode=max
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
build-args:
VERSION_TAG=${{ steps.versionNumber.outputs.versionNumber }}
platforms: linux/amd64,linux/arm64/v8
- name: Convert repository owner to lowercase
id: repoowner
run: echo "lowercase=$(echo ${{ github.repository_owner }} | awk '{print tolower($0)}')" >> $GITHUB_OUTPUT
- name: Generate tags
id: meta
uses: docker/metadata-action@v5
with:
images: |
${{ secrets.DOCKER_HUB_USERNAME }}/s-pdf
ghcr.io/${{ steps.repoowner.outputs.lowercase }}/s-pdf
tags: |
type=raw,value=${{ steps.versionNumber.outputs.versionNumber }},enable=${{ github.ref == 'refs/heads/master' }}
type=raw,value=latest,enable=${{ github.ref == 'refs/heads/master' }}
type=raw,value=alpha,enable=${{ github.ref == 'refs/heads/main' }}
- name: Build and push main Dockerfile
uses: docker/build-push-action@v6
with:
builder: ${{ steps.buildx.outputs.name }}
context: .
file: ./Dockerfile
push: true
cache-from: type=gha
cache-to: type=gha,mode=max
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
build-args: VERSION_TAG=${{ steps.versionNumber.outputs.versionNumber }}
platforms: linux/amd64,linux/arm64/v8
- name: Generate tags ultra-lite
id: meta2
uses: docker/metadata-action@v4.4.0
if: github.ref != 'refs/heads/main'
with:
images: |
${{ secrets.DOCKER_HUB_USERNAME }}/s-pdf
ghcr.io/${{ steps.repoowner.outputs.lowercase }}/s-pdf
tags: |
type=raw,value=${{ steps.versionNumber.outputs.versionNumber }}-ultra-lite,enable=${{ github.ref == 'refs/heads/master' }}
type=raw,value=latest-ultra-lite,enable=${{ github.ref == 'refs/heads/master' }}
- name: Generate tags ultra-lite
id: meta2
uses: docker/metadata-action@v5
if: github.ref != 'refs/heads/main'
with:
images: |
${{ secrets.DOCKER_HUB_USERNAME }}/s-pdf
ghcr.io/${{ steps.repoowner.outputs.lowercase }}/s-pdf
tags: |
type=raw,value=${{ steps.versionNumber.outputs.versionNumber }}-ultra-lite,enable=${{ github.ref == 'refs/heads/master' }}
type=raw,value=latest-ultra-lite,enable=${{ github.ref == 'refs/heads/master' }}
- name: Build and push Dockerfile-ultra-lite
uses: docker/build-push-action@v4.0.0
if: github.ref != 'refs/heads/main'
with:
context: .
file: ./Dockerfile-ultra-lite
push: true
cache-from: type=gha
cache-to: type=gha,mode=max
tags: ${{ steps.meta2.outputs.tags }}
labels: ${{ steps.meta2.outputs.labels }}
build-args:
VERSION_TAG=${{ steps.versionNumber.outputs.versionNumber }}
platforms: linux/amd64,linux/arm64/v8
- name: Build and push Dockerfile-ultra-lite
uses: docker/build-push-action@v6
if: github.ref != 'refs/heads/main'
with:
context: .
file: ./Dockerfile-ultra-lite
push: true
cache-from: type=gha
cache-to: type=gha,mode=max
tags: ${{ steps.meta2.outputs.tags }}
labels: ${{ steps.meta2.outputs.labels }}
build-args: VERSION_TAG=${{ steps.versionNumber.outputs.versionNumber }}
platforms: linux/amd64,linux/arm64/v8
- name: Generate tags fat
id: meta3
uses: docker/metadata-action@v5
if: github.ref != 'refs/heads/main'
with:
images: |
${{ secrets.DOCKER_HUB_USERNAME }}/s-pdf
ghcr.io/${{ steps.repoowner.outputs.lowercase }}/s-pdf
tags: |
type=raw,value=${{ steps.versionNumber.outputs.versionNumber }}-fat,enable=${{ github.ref == 'refs/heads/master' }}
type=raw,value=latest-fat,enable=${{ github.ref == 'refs/heads/master' }}
- name: Build and push main Dockerfile fat
uses: docker/build-push-action@v6
if: github.ref != 'refs/heads/main'
with:
builder: ${{ steps.buildx.outputs.name }}
context: .
file: ./Dockerfile-fat
push: true
cache-from: type=gha
cache-to: type=gha,mode=max
tags: ${{ steps.meta3.outputs.tags }}
labels: ${{ steps.meta3.outputs.labels }}
build-args: VERSION_TAG=${{ steps.versionNumber.outputs.versionNumber }}
platforms: linux/amd64,linux/arm64/v8
- name: Generate tags lite
id: meta3
uses: docker/metadata-action@v4.4.0
if: github.ref != 'refs/heads/main'
with:
images: |
${{ secrets.DOCKER_HUB_USERNAME }}/s-pdf
ghcr.io/${{ steps.repoowner.outputs.lowercase }}/s-pdf
tags: |
type=raw,value=${{ steps.versionNumber.outputs.versionNumber }}-lite,enable=${{ github.ref == 'refs/heads/master' }}
type=raw,value=latest-lite,enable=${{ github.ref == 'refs/heads/master' }}
- name: Build and push Dockerfile-lite
uses: docker/build-push-action@v4.0.0
if: github.ref != 'refs/heads/main'
with:
context: .
file: ./Dockerfile-lite
push: true
cache-from: type=gha
cache-to: type=gha,mode=max
tags: ${{ steps.meta3.outputs.tags }}
labels: ${{ steps.meta3.outputs.labels }}
build-args:
VERSION_TAG=${{ steps.versionNumber.outputs.versionNumber }}
platforms: linux/amd64,linux/arm64/v8
- name: Build and Push Helm Chart
run: |
helm package chart/stirling-pdf
helm push stirling-pdf-chart-1.0.0.tgz oci://registry-1.docker.io/frooodle


@@ -1,8 +1,7 @@
name: Release Artifacts
on:
workflow_dispatch:
release:
on:
release:
types: [created]
permissions:
contents: write
@@ -15,61 +14,44 @@ jobs:
enable_security: [true, false]
include:
- enable_security: true
file_suffix: "-with-login"
file_suffix: '-with-login'
- enable_security: false
file_suffix: ""
file_suffix: ''
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3.5.2
- name: Set up JDK 17
uses: actions/setup-java@v3.11.0
with:
java-version: '17'
distribution: 'temurin'
- name: Grant execute permission for gradlew
run: chmod +x gradlew
- name: Set up JDK 17
uses: actions/setup-java@v4
with:
java-version: "17"
distribution: "temurin"
- name: Generate jar (With Security=${{ matrix.enable_security }})
run: ./gradlew clean createExe
env:
DOCKER_ENABLE_SECURITY: ${{ matrix.enable_security }}
- uses: gradle/actions/setup-gradle@v4
with:
gradle-version: 8.7
- name: Generate jar (With Security=${{ matrix.enable_security }})
run: ./gradlew clean createExe
env:
DOCKER_ENABLE_SECURITY: ${{ matrix.enable_security }}
- name: Get version number
id: versionNumber
run: echo "versionNumber=$(./gradlew printVersion --quiet | tail -1)" >> $GITHUB_OUTPUT
- name: Rename binarie
if: matrix.file_suffix != ''
run: cp ./build/launch4j/Stirling-PDF.exe ./build/launch4j/Stirling-PDF${{ matrix.file_suffix }}.exe
- name: Upload Assets binarie
uses: actions/upload-artifact@v4
with:
path: ./build/launch4j/Stirling-PDF${{ matrix.file_suffix }}.exe
name: Stirling-PDF${{ matrix.file_suffix }}.exe
overwrite: true
retention-days: 1
if-no-files-found: error
- name: Upload binaries to release
uses: softprops/action-gh-release@v2
with:
files: ./build/launch4j/Stirling-PDF${{ matrix.file_suffix }}.exe
- name: Rename jar binaries
run: cp ./build/libs/Stirling-PDF-${{ steps.versionNumber.outputs.versionNumber }}.jar ./build/libs/Stirling-PDF${{ matrix.file_suffix }}.jar
- name: Upload Assets jar binaries
uses: actions/upload-artifact@v4
with:
path: ./build/libs/Stirling-PDF${{ matrix.file_suffix }}.jar
name: Stirling-PDF${{ matrix.file_suffix }}.jar
overwrite: true
retention-days: 1
if-no-files-found: error
- name: Upload jar binaries to release
uses: softprops/action-gh-release@v2
with:
files: ./build/libs/Stirling-PDF${{ matrix.file_suffix }}.jar
- name: Upload binaries to release
uses: svenstaro/upload-release-action@v2
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
file: ./build/launch4j/Stirling-PDF.exe
asset_name: Stirling-PDF${{ matrix.file_suffix }}.exe
tag: ${{ github.ref }}
overwrite: true
- name: Get version number
id: versionNumber
run: echo "::set-output name=versionNumber::$(./gradlew printVersion --quiet | tail -1)"
- name: Upload jar binaries to release
uses: svenstaro/upload-release-action@v2
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
file: ./build/libs/Stirling-PDF-${{ steps.versionNumber.outputs.versionNumber }}.jar
asset_name: Stirling-PDF${{ matrix.file_suffix }}.jar
tag: ${{ github.ref }}
overwrite: true


@@ -1,32 +0,0 @@
name: Close stale issues
on:
schedule:
- cron: "30 0 * * *"
workflow_dispatch:
jobs:
stale:
runs-on: ubuntu-latest
permissions:
issues: write
pull-requests: write
steps:
- name: 30 days stale issues
uses: actions/stale@v9
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
days-before-stale: 30
days-before-close: 7
stale-issue-message: >
This issue has been automatically marked as stale because it has had no recent activity.
It will be closed if no further activity occurs. Thank you for your contributions.
close-issue-message: >
This issue has been automatically closed because it has had no recent activity after being marked as stale.
Please reopen if you need further assistance.
stale-issue-label: "Stale"
remove-stale-when-updated: true
only-issue-labels: "more-info-needed"
days-before-pr-stale: -1 # Prevents PRs from being marked as stale
days-before-pr-close: -1 # Prevents PRs from being closed
start-date: '2024-07-06T00:00:00Z' # ISO 8601 Format


@@ -3,37 +3,35 @@ name: Update Swagger
on:
workflow_dispatch:
push:
branches:
branches:
- master
jobs:
push:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up JDK 17
uses: actions/setup-java@v4
with:
java-version: "17"
distribution: "temurin"
- uses: actions/checkout@v3.5.2
- name: Set up JDK 17
uses: actions/setup-java@v3.11.0
with:
java-version: '17'
distribution: 'temurin'
- name: Grant execute permission for gradlew
run: chmod +x gradlew
- uses: gradle/actions/setup-gradle@v4
- name: Generate Swagger documentation
run: ./gradlew generateOpenApiDocs
- name: Generate Swagger documentation
run: ./gradlew generateOpenApiDocs
- name: Upload Swagger Documentation to SwaggerHub
run: ./gradlew swaggerhubUpload
env:
SWAGGERHUB_API_KEY: ${{ secrets.SWAGGERHUB_API_KEY }}
- name: Upload Swagger Documentation to SwaggerHub
run: ./gradlew swaggerhubUpload
env:
SWAGGERHUB_API_KEY: ${{ secrets.SWAGGERHUB_API_KEY }}
- name: Get version number
id: versionNumber
run: echo "versionNumber=$(./gradlew printVersion --quiet | tail -1)" >> $GITHUB_OUTPUT
- name: Set API version as published and default on SwaggerHub
run: |
curl -X PUT -H "Authorization: ${SWAGGERHUB_API_KEY}" "https://api.swaggerhub.com/apis/Frooodle/Stirling-PDF/${{ steps.versionNumber.outputs.versionNumber }}/settings/lifecycle" -H "accept: application/json" -H "Content-Type: application/json" -d "{\"published\":true,\"default\":true}"
env:
SWAGGERHUB_API_KEY: ${{ secrets.SWAGGERHUB_API_KEY }}
- name: Set API version as published and default on SwaggerHub
run: |
curl -X PUT -H "Authorization: ${SWAGGERHUB_API_KEY}" "https://api.swaggerhub.com/apis/Frooodle/Stirling-PDF/${{ steps.versionNumber.outputs.versionNumber }}/settings/lifecycle" -H "accept: application/json" -H "Content-Type: application/json" -d "{\"published\":true,\"default\":true}"
env:
SWAGGERHUB_API_KEY: ${{ secrets.SWAGGERHUB_API_KEY }}


@@ -1,92 +0,0 @@
name: Sync Files
on:
push:
branches:
- main
paths:
- "build.gradle"
- "src/main/resources/messages_*.properties"
- "scripts/ignore_translation.toml"
permissions:
contents: write
pull-requests: write
jobs:
sync-versions:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3.x"
- name: Install dependencies
run: pip install pyyaml
- name: Sync versions
run: python .github/scripts/gradle_to_chart.py
- name: Set up git config
run: |
git config --global user.name "github-actions[bot]"
git config --global user.email "github-actions[bot]@users.noreply.github.com"
- name: Run git add
run: |
git add .
git diff --staged --quiet || git commit -m ":floppy_disk: Sync Versions
> Made via sync_files.yml" || echo "no changes"
- name: Create Pull Request
uses: peter-evans/create-pull-request@v6
with:
token: ${{ secrets.GITHUB_TOKEN }}
commit-message: Update files
committer: GitHub Action <action@github.com>
author: GitHub Action <action@github.com>
signoff: true
branch: sync_version
title: ":floppy_disk: Update Version"
body: |
Auto-generated by [create-pull-request][1]
[1]: https://github.com/peter-evans/create-pull-request
draft: false
delete-branch: true
labels: github-actions
sync-readme:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3.x"
- name: Install dependencies
run: pip install tomlkit
- name: Sync README
run: python scripts/counter_translation.py
- name: Set up git config
run: |
git config --global user.name "github-actions[bot]"
git config --global user.email "github-actions[bot]@users.noreply.github.com"
- name: Run git add
run: |
git add .
git diff --staged --quiet || git commit -m ":memo: Sync README
> Made via sync_files.yml" || echo "no changes"
- name: Create Pull Request
uses: peter-evans/create-pull-request@v6
with:
token: ${{ secrets.GITHUB_TOKEN }}
commit-message: Update files
committer: GitHub Action <action@github.com>
author: GitHub Action <action@github.com>
signoff: true
branch: sync_readme
title: ":memo: Update README: Translation Progress Table"
body: |
Auto-generated by [create-pull-request][1]
[1]: https://github.com/peter-evans/create-pull-request
draft: false
delete-branch: true
labels: Documentation,Translation,github-actions

.github/workflows/test.yml (vendored, new file, 56 changed lines)

@@ -0,0 +1,56 @@
name: Docker Compose Tests
on:
pull_request:
paths:
- 'src/**'
- '**.gradle'
- '!src/main/java/resources/messages*'
- 'exampleYmlFiles/**'
- 'Dockerfile'
- 'Dockerfile**'
jobs:
test:
runs-on: ubuntu-latest
steps:
- name: Checkout Repository
uses: actions/checkout@v2
- name: Set up Java 17
uses: actions/setup-java@v2
with:
java-version: '17'
distribution: 'adopt'
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Run Docker Compose Tests
run: |
chmod +x ./gradlew
- name: Get version number
id: versionNumber
run: echo "::set-output name=versionNumber::$(./gradlew printVersion --quiet | tail -1)"
- name: Cache Docker layers
uses: actions/cache@v2
with:
path: /tmp/.buildx-cache
key: ${{ runner.os }}-buildx-${{ steps.versionNumber.outputs.versionNumber }}
restore-keys: |
${{ runner.os }}-buildx-
- name: Install Docker Compose
run: |
sudo curl -L "https://github.com/docker/compose/releases/download/v2.6.0/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose
sudo chmod +x /usr/local/bin/docker-compose
- name: Run Docker Compose Tests
run: |
chmod +x ./test.sh
./test.sh

.gitignore (vendored, 48 changed lines)

@@ -1,3 +1,5 @@
### Eclipse ###
.metadata
bin/
@@ -20,6 +22,7 @@ customFiles/
configs/
watchedFolders/
# Gradle
.gradle
.lock
@@ -116,48 +119,9 @@ watchedFolders/
*.db
/build
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*.pyo
# Virtual environments
.env*
.venv*
env*/
venv*/
ENV/
env.bak/
venv.bak/
# VS Code
/.vscode/**/*
!/.vscode/settings.json
# IntelliJ IDEA
.idea/
*.iml
out/
/.vscode
/.idea
# Ignore Mac DS_Store files
.DS_Store
**/.DS_Store
# cucumber
/cucumber/reports/**
# Certs
*.p12
*.pem
*.crt
*.cer
*.der
*.key
*.csr
# cache
.ruff_cache
.mypy_cache
.pytest_cache
.ipynb_checkpoints
**/.DS_Store


@@ -1,39 +0,0 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.2.1
hooks:
- id: ruff
args:
- --fix
- --line-length=127
files: ^((.github/scripts|scripts)/.+)?[^/]+\.py$
exclude: (split_photos.py)
- id: ruff-format
files: ^((.github/scripts|scripts)/.+)?[^/]+\.py$
exclude: (split_photos.py)
- repo: https://github.com/codespell-project/codespell
rev: v2.2.6
hooks:
- id: codespell
args:
- --ignore-words-list=
- --skip="./.*,*.csv,*.json,*.ambr"
- --quiet-level=2
files: \.(properties|html|css|js|py|md)$
exclude: (.vscode|.devcontainer|src/main/resources|Dockerfile)
- repo: local
hooks:
- id: check-duplicate-properties-keys
name: Check Duplicate Properties Keys
entry: python .github/scripts/check_duplicates.py
language: python
files: ^(src)/.+\.properties$
- repo: local
hooks:
- id: check-html-tabs
name: Check HTML for tabs
# args: ["--replace_with= "]
entry: python .github/scripts/check_tabulator.py
language: python
exclude: ^(src/main/resources/static/pdfjs|src/main/resources/static/pdfjs-legacy)
files: ^.*(\.html|\.css|\.js)$

.vscode/settings.json (vendored, 53 changed lines)

@@ -1,53 +0,0 @@
{
"java.compile.nullAnalysis.mode": "automatic",
"files.eol": "auto",
"java.configuration.updateBuildConfiguration": "interactive",
"black-formatter.args": ["--line-length", "127"],
"flake8.args": ["--max-line-length", "127"],
"pylint.args": ["max-line-length", "127"],
"[java]": {
"editor.tabSize": 4,
"editor.detectIndentation": false,
"editor.rulers": [127]
},
"[python]": {
"editor.tabSize": 2,
"editor.detectIndentation": false,
"editor.rulers": [127]
},
"[gradle-build]": {
"editor.tabSize": 4,
"editor.detectIndentation": false,
"editor.rulers": [127]
},
"[gradle]": {
"editor.tabSize": 4,
"editor.detectIndentation": false,
"editor.rulers": [127]
},
"[html]": {
"editor.tabSize": 2,
"editor.rulers": [127],
"files.trimFinalNewlines": false,
"files.insertFinalNewline": false
},
"[javascript]": {
"editor.tabSize": 2,
"editor.rulers": [127]
},
"[yaml]": {
"files.trimFinalNewlines": false,
"files.insertFinalNewline": false
},
"diffEditor.maxComputationTime": 0,
"editor.wordSegmenterLocales": null,
"editor.guides.bracketPairs": "active",
"editor.guides.bracketPairsHorizontal": "active",
"files.insertFinalNewline": true,
"files.trimFinalNewlines": true,
"files.trimTrailingWhitespace": true,
"editor.indentSize": "tabSize",
"editor.stickyScroll.enabled": false,
"editor.minimap.enabled": false,
"editor.formatOnSave": true
}

View File

@@ -27,10 +27,6 @@ Please make sure your Pull Request adheres to the following guidelines:
If you would like to add or modify a translation, please see [How to add new languages to Stirling-PDF](HowToAddNewLanguage.md). Also, please create a Pull Request so others can use it!
## Docs
Documentation for Stirling-PDF is handled in a separate repository. Please see the [Docs repository](https://github.com/Stirling-Tools/Stirling-Tools.github.io) or use the "edit this page" button at the bottom of each page at [https://stirlingtools.com/docs/](https://stirlingtools.com/docs/).
## Fixing Bugs or Adding a New Feature
First, make sure you've read the section [Pull Requests](#pull-requests).

View File

@@ -1,40 +0,0 @@
# New Database Backup and Import Functionality
**Full activation will take place on approximately January 5th, 2025!**
Why is the waiting time six months?
Some users only install updates sporadically; if they skip the preparation phase, this can lead to data loss in the database.
## Functionality Overview
The newly introduced feature enhances the application with robust database backup and import capabilities. This feature is designed to ensure data integrity and provide a straightforward way to manage database backups. Here's how it works:
1. Automatic Backup Creation
   - The system automatically creates a database backup every day at midnight. This ensures that there is always a recent backup available, minimizing the risk of data loss. (A minimal sketch of such a scheduled job is shown after this list.)
2. Manual Backup Export
- Admin actions that modify the user database trigger a manual export of the database. This keeps the backup up-to-date with the latest changes and provides an extra layer of data security.
3. Importing Database Backups
- Admin users can import a database backup either via the web interface or API endpoints. This allows for easy restoration of the database to a previous state in case of data corruption or other issues.
- The import process ensures that the database structure and data are correctly restored, maintaining the integrity of the application.
4. Managing Backup Files
- Admins can view a list of all existing backup files, along with their creation dates and sizes. This helps in managing storage and identifying the most recent or relevant backups.
- Backup files can be downloaded for offline storage or transferred to other environments, providing flexibility in database management.
- Unnecessary backup files can be deleted through the interface to free up storage space and maintain an organized backup directory.
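To make the schedule concrete, below is a minimal, hypothetical sketch of such a nightly job using Spring's `@Scheduled` support. The class and interface names, and the assumption that `@EnableScheduling` is active, are illustrative only and not the actual Stirling-PDF implementation.

```java
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

// Hypothetical names for illustration only; not the real Stirling-PDF classes.
interface DatabaseBackupService {
    void exportDatabase(); // assumed to write a timestamped SQL dump into the backup directory
}

@Component
class NightlyBackupJob {

    private final DatabaseBackupService backupService;

    NightlyBackupJob(DatabaseBackupService backupService) {
        this.backupService = backupService;
    }

    // Spring cron fields: second minute hour day-of-month month day-of-week.
    // "0 0 0 * * *" fires once per day at midnight.
    @Scheduled(cron = "0 0 0 * * *")
    void runNightlyBackup() {
        backupService.exportDatabase();
    }
}
```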
## User Interface
### Web Interface
1. Upload SQL files to import database backups.
2. View details of existing backups, such as file names, creation dates, and sizes.
3. Download backup files for offline storage.
4. Delete outdated or unnecessary backup files.
### API Endpoints
1. Import database backups by uploading SQL files.
2. Download backup files.
3. Delete backup files.
This new functionality streamlines database management, ensuring that backups are always available and easy to manage, thus improving the reliability and resilience of the application.
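As a rough, hedged illustration of what such admin endpoints could look like in a Spring application, here is a sketch. The request paths, parameter names, and placeholder bodies are assumptions made for this example; consult the application's actual API documentation for the real endpoints.

```java
import org.springframework.core.io.Resource;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.multipart.MultipartFile;

// Hypothetical controller for illustration; paths and behaviour are assumptions.
@RestController
@RequestMapping("/api/v1/database")
class DatabaseBackupController {

    // 1. Import a database backup by uploading a SQL file.
    @PostMapping("/import")
    ResponseEntity<Void> importBackup(@RequestParam("file") MultipartFile sqlFile) {
        // validation and restore logic (replaying the SQL dump) would go here
        return ResponseEntity.ok().build();
    }

    // 2. Download an existing backup file.
    @GetMapping("/backup/{fileName}")
    ResponseEntity<Resource> downloadBackup(@PathVariable String fileName) {
        // look up the file in the backup directory and stream it back
        return ResponseEntity.notFound().build(); // placeholder
    }

    // 3. Delete an outdated backup file.
    @DeleteMapping("/backup/{fileName}")
    ResponseEntity<Void> deleteBackup(@PathVariable String fileName) {
        // remove the file from the backup directory
        return ResponseEntity.noContent().build();
    }
}
```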

View File

@@ -1,67 +1,69 @@
# Main stage
FROM alpine:3.20.3
# Copy necessary files
COPY scripts /scripts
COPY pipeline /pipeline
COPY src/main/resources/static/fonts/*.ttf /usr/share/fonts/opentype/noto/
#COPY src/main/resources/static/fonts/*.otf /usr/share/fonts/opentype/noto/
COPY build/libs/*.jar app.jar
ARG VERSION_TAG
# Set Environment Variables
ENV DOCKER_ENABLE_SECURITY=false \
VERSION_TAG=$VERSION_TAG \
JAVA_TOOL_OPTIONS="$JAVA_TOOL_OPTIONS -XX:MaxRAMPercentage=75" \
HOME=/home/stirlingpdfuser \
PUID=1000 \
PGID=1000 \
UMASK=022
FROM alpine:3.19.1
# JDK for app
RUN echo "@testing https://dl-cdn.alpinelinux.org/alpine/edge/main" | tee -a /etc/apk/repositories && \
echo "@testing https://dl-cdn.alpinelinux.org/alpine/edge/community" | tee -a /etc/apk/repositories && \
echo "@testing https://dl-cdn.alpinelinux.org/alpine/edge/testing" | tee -a /etc/apk/repositories && \
apk upgrade --no-cache -a && \
apk add --no-cache \
ca-certificates \
tzdata \
tini \
bash \
curl \
shadow \
su-exec \
openssl \
openssl-dev \
openjdk21-jre \
openjdk17-jre \
# Doc conversion
libreoffice \
# pdftohtml
poppler-utils \
    # OCR MY PDF (unpaper for deskew and other advanced features)
libreoffice@testing \
    # OCR MY PDF (unpaper for deskew and other advanced features)
ocrmypdf \
tesseract-ocr-data-eng \
# CV
py3-opencv \
# python3/pip
python3 \
py3-pip && \
python3 && \
wget https://bootstrap.pypa.io/get-pip.py -qO - | python3 - --break-system-packages --no-cache-dir --upgrade && \
# uno unoconv and HTML
pip install --break-system-packages --no-cache-dir --upgrade unoconv WeasyPrint pdf2image pillow && \
mv /usr/share/tessdata /usr/share/tessdata-original && \
mkdir -p $HOME /configs /logs /customFiles /pipeline/watchedFolders /pipeline/finishedFolders && \
fc-cache -f -v && \
chmod +x /scripts/* && \
chmod +x /scripts/init.sh && \
# User permissions
addgroup -S stirlingpdfgroup && adduser -S stirlingpdfuser -G stirlingpdfgroup && \
chown -R stirlingpdfuser:stirlingpdfgroup $HOME /scripts /usr/share/fonts/opentype/noto /configs /customFiles /pipeline && \
chown stirlingpdfuser:stirlingpdfgroup /app.jar && \
tesseract --list-langs
pip install --break-system-packages --no-cache-dir --upgrade unoconv WeasyPrint && \
mv /usr/share/tessdata /usr/share/tessdata-original
EXPOSE 8080/tcp
ARG VERSION_TAG
# Set Environment Variables
ENV DOCKER_ENABLE_SECURITY=false \
HOME=/home/stirlingpdfuser \
VERSION_TAG=$VERSION_TAG \
JAVA_TOOL_OPTIONS="$JAVA_TOOL_OPTIONS -XX:MaxRAMPercentage=75"
# PUID=1000 \
# PGID=1000 \
# UMASK=022 \
# Copy necessary files
COPY scripts /scripts
COPY pipeline /pipeline
COPY src/main/resources/static/fonts/*.ttf /usr/share/fonts/opentype/noto
COPY src/main/resources/static/fonts/*.otf /usr/share/fonts/opentype/noto
COPY build/libs/*.jar app.jar
# Create user and group
##RUN groupadd -g $PGID stirlingpdfgroup && \
## useradd -u $PUID -g stirlingpdfgroup -s /bin/sh stirlingpdfuser && \
## mkdir -p $HOME && chown stirlingpdfuser:stirlingpdfgroup $HOME && \
# Set up necessary directories and permissions
RUN mkdir -p /configs /logs /customFiles /pipeline/watchedFolders /pipeline/finishedFolders && \
##&& \
## chown -R stirlingpdfuser:stirlingpdfgroup /scripts /usr/share/fonts/opentype/noto /usr/share/tesseract-ocr /configs /customFiles && \
## chown -R stirlingpdfuser:stirlingpdfgroup /usr/share/tesseract-ocr-original && \
# Set font cache and permissions
fc-cache -f -v && \
chmod +x /scripts/*
## chown stirlingpdfuser:stirlingpdfgroup /app.jar && \
## chmod +x /scripts/init.sh
EXPOSE 8080
# Set user and run command
##USER stirlingpdfuser
ENTRYPOINT ["tini", "--", "/scripts/init.sh"]
CMD ["java", "-Dfile.encoding=UTF-8", "-jar", "/app.jar"]

View File

@@ -1,83 +0,0 @@
# Build the application
FROM gradle:8.7-jdk17 AS build
# Set the working directory
WORKDIR /app
# Copy the entire project to the working directory
COPY . .
# Build the application with DOCKER_ENABLE_SECURITY=false
RUN DOCKER_ENABLE_SECURITY=true \
./gradlew clean build
# Main stage
FROM alpine:3.20.3
# Copy necessary files
COPY scripts /scripts
COPY pipeline /pipeline
COPY src/main/resources/static/fonts/*.ttf /usr/share/fonts/opentype/noto/
COPY --from=build /app/build/libs/*.jar app.jar
ARG VERSION_TAG
# Set Environment Variables
ENV DOCKER_ENABLE_SECURITY=false \
VERSION_TAG=$VERSION_TAG \
JAVA_TOOL_OPTIONS="$JAVA_TOOL_OPTIONS -XX:MaxRAMPercentage=75" \
HOME=/home/stirlingpdfuser \
PUID=1000 \
PGID=1000 \
UMASK=022 \
FAT_DOCKER=true \
INSTALL_BOOK_AND_ADVANCED_HTML_OPS=false
# JDK for app
RUN echo "@testing https://dl-cdn.alpinelinux.org/alpine/edge/main" | tee -a /etc/apk/repositories && \
echo "@testing https://dl-cdn.alpinelinux.org/alpine/edge/community" | tee -a /etc/apk/repositories && \
echo "@testing https://dl-cdn.alpinelinux.org/alpine/edge/testing" | tee -a /etc/apk/repositories && \
apk upgrade --no-cache -a && \
apk add --no-cache \
ca-certificates \
tzdata \
tini \
bash \
curl \
shadow \
su-exec \
openssl \
openssl-dev \
openjdk21-jre \
# Doc conversion
libreoffice \
# pdftohtml
poppler-utils \
    # OCR MY PDF (unpaper for deskew and other advanced features)
ocrmypdf \
tesseract-ocr-data-eng \
font-terminus font-dejavu font-noto font-noto-cjk font-awesome font-noto-extra \
# CV
py3-opencv \
# python3/pip
python3 \
py3-pip && \
# uno unoconv and HTML
pip install --break-system-packages --no-cache-dir --upgrade unoconv WeasyPrint pdf2image pillow && \
mv /usr/share/tessdata /usr/share/tessdata-original && \
mkdir -p $HOME /configs /logs /customFiles /pipeline/watchedFolders /pipeline/finishedFolders && \
fc-cache -f -v && \
chmod +x /scripts/* && \
chmod +x /scripts/init.sh && \
# User permissions
addgroup -S stirlingpdfgroup && adduser -S stirlingpdfuser -G stirlingpdfgroup && \
chown -R stirlingpdfuser:stirlingpdfgroup $HOME /scripts /usr/share/fonts/opentype/noto /configs /customFiles /pipeline && \
chown stirlingpdfuser:stirlingpdfgroup /app.jar && \
tesseract --list-langs
EXPOSE 8080/tcp
# Set user and run command
ENTRYPOINT ["tini", "--", "/scripts/init.sh"]
CMD ["java", "-Dfile.encoding=UTF-8", "-jar", "/app.jar"]

61
Dockerfile-lite Normal file
View File

@@ -0,0 +1,61 @@
# use alpine
FROM alpine:3.19.1
ARG VERSION_TAG
# Set Environment Variables
ENV DOCKER_ENABLE_SECURITY=false \
HOME=/home/stirlingpdfuser \
VERSION_TAG=$VERSION_TAG \
JAVA_TOOL_OPTIONS="$JAVA_TOOL_OPTIONS -XX:MaxRAMPercentage=75"
# PUID=1000 \
# PGID=1000 \
# UMASK=022 \
# Copy necessary files
COPY scripts/download-security-jar.sh /scripts/download-security-jar.sh
COPY scripts/init-without-ocr.sh /scripts/init-without-ocr.sh
COPY pipeline /pipeline
COPY src/main/resources/static/fonts/*.ttf /usr/share/fonts/opentype/noto
COPY src/main/resources/static/fonts/*.otf /usr/share/fonts/opentype/noto
COPY build/libs/*.jar app.jar
RUN echo "@testing https://dl-cdn.alpinelinux.org/alpine/edge/main" | tee -a /etc/apk/repositories && \
echo "@testing https://dl-cdn.alpinelinux.org/alpine/edge/community" | tee -a /etc/apk/repositories && \
echo "@testing https://dl-cdn.alpinelinux.org/alpine/edge/testing" | tee -a /etc/apk/repositories && \
apk add --no-cache \
ca-certificates \
tzdata \
tini \
bash \
curl \
openjdk17-jre \
# Doc conversion
libreoffice@testing \
# python and pip
python3 && \
wget https://bootstrap.pypa.io/get-pip.py -qO - | python3 - --break-system-packages --no-cache-dir --upgrade && \
# uno unoconv and HTML
pip install --break-system-packages --no-cache-dir --upgrade unoconv WeasyPrint && \
# Create user and group
#RUN groupadd -g $PGID stirlingpdfgroup && \
# useradd -u $PUID -g stirlingpdfgroup -s /bin/sh stirlingpdfuser && \
# mkdir -p $HOME && chown stirlingpdfuser:stirlingpdfgroup $HOME
# Set up necessary directories and permissions
mkdir -p /configs /logs /customFiles /pipeline/watchedFolders /pipeline/finishedFolders && \
# chown -R stirlingpdfuser:stirlingpdfgroup /usr/share/fonts/opentype/noto /configs /customFiles
# Set font cache and permissions
fc-cache -f -v && \
chmod +x /scripts/*.sh
# chown stirlingpdfuser:stirlingpdfgroup /app.jar
# Set environment variables
ENV ENDPOINTS_GROUPS_TO_REMOVE=OpenCV,OCRmyPDF
ENV DOCKER_ENABLE_SECURITY=false
EXPOSE 8080
# Run the application
#USER stirlingpdfuser
ENTRYPOINT ["tini", "--", "/scripts/init-without-ocr.sh"]
CMD ["java", "-Dfile.encoding=UTF-8", "-jar", "/app.jar"]

View File

@@ -1,5 +1,5 @@
# use alpine
FROM alpine:3.20.3
FROM alpine:3.19.1
ARG VERSION_TAG
@@ -7,10 +7,10 @@ ARG VERSION_TAG
ENV DOCKER_ENABLE_SECURITY=false \
HOME=/home/stirlingpdfuser \
VERSION_TAG=$VERSION_TAG \
JAVA_TOOL_OPTIONS="$JAVA_TOOL_OPTIONS -XX:MaxRAMPercentage=75" \
PUID=1000 \
PGID=1000 \
UMASK=022
JAVA_TOOL_OPTIONS="$JAVA_TOOL_OPTIONS -XX:MaxRAMPercentage=75"
# PUID=1000 \
# PGID=1000 \
# UMASK=022 \
# Copy necessary files
COPY scripts/download-security-jar.sh /scripts/download-security-jar.sh
@@ -18,32 +18,34 @@ COPY scripts/init-without-ocr.sh /scripts/init-without-ocr.sh
COPY pipeline /pipeline
COPY build/libs/*.jar app.jar
# Create user and group using Alpine's addgroup and adduser
#RUN addgroup -g $PGID stirlingpdfgroup && \
# adduser -u $PUID -G stirlingpdfgroup -s /bin/sh -D stirlingpdfuser && \
# mkdir -p $HOME && chown stirlingpdfuser:stirlingpdfgroup $HOME
# Set up necessary directories and permissions
RUN echo "@testing https://dl-cdn.alpinelinux.org/alpine/edge/main" | tee -a /etc/apk/repositories && \
echo "@testing https://dl-cdn.alpinelinux.org/alpine/edge/community" | tee -a /etc/apk/repositories && \
echo "@testing https://dl-cdn.alpinelinux.org/alpine/edge/testing" | tee -a /etc/apk/repositories && \
apk upgrade --no-cache -a && \
#RUN mkdir -p /scripts /configs /customFiles && \
# chown -R stirlingpdfuser:stirlingpdfgroup /scripts /configs /customFiles /logs /pipeline /pipeline/defaultWebUIConfigs /pipeline/watchedFolders /pipeline/finishedFolders
RUN mkdir /configs /logs /customFiles && \
# Set font cache and permissions
#RUN chown stirlingpdfuser:stirlingpdfgroup /app.jar
chmod +x /scripts/*.sh && \
apk add --no-cache \
ca-certificates \
tzdata \
tini \
bash \
curl \
shadow \
su-exec \
openjdk21-jre && \
# User permissions
mkdir /configs /logs /customFiles && \
chmod +x /scripts/*.sh && \
addgroup -S stirlingpdfgroup && adduser -S stirlingpdfuser -G stirlingpdfgroup && \
chown -R stirlingpdfuser:stirlingpdfgroup $HOME /scripts /configs /customFiles /pipeline && \
chown stirlingpdfuser:stirlingpdfgroup /app.jar
openjdk17-jre && \
echo "@testing https://dl-cdn.alpinelinux.org/alpine/edge/main" | tee -a /etc/apk/repositories && \
echo "@testing https://dl-cdn.alpinelinux.org/alpine/edge/community" | tee -a /etc/apk/repositories && \
echo "@testing https://dl-cdn.alpinelinux.org/alpine/edge/testing" | tee -a /etc/apk/repositories
# Set environment variables
ENV ENDPOINTS_GROUPS_TO_REMOVE=CLI
EXPOSE 8080/tcp
EXPOSE 8080
ENTRYPOINT ["tini", "--", "/scripts/init-without-ocr.sh"]
# Run the application
ENTRYPOINT ["tini", "--", "/scripts/init-without-ocr.sh"]
CMD ["java", "-Dfile.encoding=UTF-8", "-jar", "/app.jar"]

View File

@@ -1,47 +1,46 @@
| Operation | PageOps | Convert | Security | Other | CLI | Python | OpenCV | LibreOffice | OCRmyPDF | Java | Javascript |
| ------------------- | ------- | ------- | -------- | ----- | --- | ------ | ------ | ----------- | -------- | ---- | ---------- |
| adjust-contrast | ✔️ | | | | | | | | | | ✔️ |
| auto-split-pdf | ✔️ | | | | | | | | | ✔️ | |
| crop | ✔️ | | | | | | | | | ✔️ | |
| extract-page | ✔️ | | | | | | | | | ✔️ | |
| merge-pdfs | ✔️ | | | | | | | | | ✔️ | |
| multi-page-layout | ✔️ | | | | | | | | | ✔️ | |
| pdf-organizer | ✔️ | | | | | | | | | ✔️ | ✔️ |
| pdf-to-single-page | ✔️ | | | | | | | | | ✔️ | |
| remove-pages | ✔️ | | | | | | | | | ✔️ | |
| rotate-pdf | ✔️ | | | | | | | | | ✔️ | |
| scale-pages | ✔️ | | | | | | | | | ✔️ | |
| split-pdfs | ✔️ | | | | | | | | | ✔️ | |
| file-to-pdf | | ✔️ | | | ✔️ | | | ✔️ | | | |
| img-to-pdf | | ✔️ | | | | | | | | ✔️ | |
| pdf-to-html | | ✔️ | | | ✔️ | | | ✔️ | | | |
| pdf-to-img | | ✔️ | | | | ✔️ | | | | ✔️ | |
| pdf-to-pdfa | | ✔️ | | | ✔️ | | | | ✔️ | | |
| pdf-to-markdown | | ✔️ | | | | | | | | ✔️ | |
| pdf-to-presentation | | ✔️ | | | ✔️ | | | ✔️ | | | |
| pdf-to-text | | ✔️ | | | ✔️ | | | ✔️ | | | |
| pdf-to-word | | ✔️ | | | ✔️ | | | ✔️ | | | |
| pdf-to-xml | | ✔️ | | | ✔️ | | | ✔️ | | | |
| xlsx-to-pdf | | ✔️ | | | ✔️ | | | ✔️ | | | |
| add-password | | | ✔️ | | | | | | | ✔️ | |
| add-watermark | | | ✔️ | | | | | | | ✔️ | |
| cert-sign | | | ✔️ | | | | | | | ✔️ | |
| remove-cert-sign | | | ✔️ | | | | | | | ✔️ | |
| change-permissions | | | ✔️ | | | | | | | ✔️ | |
| remove-password | | | ✔️ | | | | | | | ✔️ | |
| sanitize-pdf | | | ✔️ | | | | | | | ✔️ | |
| add-image | | | | ✔️ | | | | | | ✔️ | |
| add-page-numbers | | | | ✔️ | | | | | | ✔️ | |
| auto-rename | | | | ✔️ | | | | | | ✔️ | |
| change-metadata | | | | ✔️ | | | | | | ✔️ | |
| compare | | | | ✔️ | | | | | | | ✔️ |
| compress-pdf | | | | ✔️ | ✔️ | | | | ✔️ | | |
| extract-image-scans | | | | ✔️ | ✔️ | ✔️ | ✔️ | | | | |
| extract-images | | | | ✔️ | | | | | | ✔️ | |
| flatten | | | | ✔️ | | | | | | | ✔️ |
| get-info-on-pdf | | | | ✔️ | | | | | | ✔️ | |
| ocr-pdf | | | | ✔️ | ✔️ | | | | ✔️ | | |
| remove-blanks | | | | ✔️ | ✔️ | ✔️ | ✔️ | | | | |
| repair | | | | ✔️ | ✔️ | | | ✔️ | | | |
| show-javascript | | | | ✔️ | | | | | | | ✔️ |
| sign | | | | ✔️ | | | | | | | ✔️ |
| Operation | PageOps | Convert | Security | Other | CLI | Python | OpenCV | LibreOffice | OCRmyPDF | Java | Javascript |
|---------------------|---------|---------|----------|-------|------|--------|--------|-------------|----------|----------|------------|
| adjust-contrast | ✔️ | | | | | | | | | | ✔️ |
| auto-split-pdf | ✔️ | | | | | | | | | ✔️ | |
| crop | ✔️ | | | | | | | | | ✔️ | |
| extract-page | ✔️ | | | | | | | | | ✔️ | |
| merge-pdfs | ✔️ | | | | | | | | | ✔️ | |
| multi-page-layout | ✔️ | | | | | | | | | ✔️ | |
| pdf-organizer | ✔️ | | | | | | | | | ✔️ | ✔️ |
| pdf-to-single-page | ✔️ | | | | | | | | | ✔️ | |
| remove-pages | ✔️ | | | | | | | | | ✔️ | |
| rotate-pdf | ✔️ | | | | | | | | | ✔️ | |
| scale-pages | ✔️ | | | | | | | | | ✔️ | |
| split-pdfs | ✔️ | | | | | | | | | ✔️ | |
| file-to-pdf | | ✔️ | | | ✔️ | | | ✔️ | | | |
| img-to-pdf | | ✔️ | | | | | | | | ✔️ | |
| pdf-to-html | | ✔️ | | | ✔️ | | | ✔️ | | | |
| pdf-to-img | | ✔️ | | | | | | | | ✔️ | |
| pdf-to-pdfa | | ✔️ | | | ✔️ | | | | ✔️ | | |
| pdf-to-markdown | | ✔️ | | | | | | | | ✔️ | |
| pdf-to-presentation | | ✔️ | | | ✔️ | | | ✔️ | | | |
| pdf-to-text | | ✔️ | | | ✔️ | | | ✔️ | | | |
| pdf-to-word | | ✔️ | | | ✔️ | | | ✔️ | | | |
| pdf-to-xml | | ✔️ | | | ✔️ | | | ✔️ | | | |
| xlsx-to-pdf | | ✔️ | | | ✔️ | | | ✔️ | | | |
| add-password | | | ✔️ | | | | | | | ✔️ | |
| add-watermark | | | ✔️ | | | | | | | ✔️ | |
| cert-sign | | | ✔️ | | | | | | | ✔️ | |
| change-permissions | | | ✔️ | | | | | | | ✔️ | |
| remove-password | | | ✔️ | | | | | | | ✔️ | |
| sanitize-pdf | | | ✔️ | | | | | | | ✔️ | |
| add-image | | | | ✔️ | | | | | | ✔️ | |
| add-page-numbers | | | | ✔️ | | | | | | ✔️ | |
| auto-rename | | | | ✔️ | | | | | | ✔️ | |
| change-metadata | | | | ✔️ | | | | | | ✔️ | |
| compare | | | | ✔️ | | | | | | | ✔️ |
| compress-pdf | | | | ✔️ | ✔️ | | | | ✔️ | | |
| extract-image-scans | | | | ✔️ | ✔️ | ✔️ | ✔️ | | | | |
| extract-images | | | | ✔️ | | | | | | ✔️ | |
| flatten | | | | ✔️ | | | | | | | ✔️ |
| get-info-on-pdf | | | | ✔️ | | | | | | ✔️ | |
| ocr-pdf | | | | ✔️ | ✔️ | | | | ✔️ | | |
| remove-blanks | | | | ✔️ | ✔️ | ✔️ | ✔️ | | | | |
| repair | | | | ✔️ | ✔️ | | | ✔️ | | | |
| show-javascript | | | | ✔️ | | | | | | | ✔️ |
| sign | | | | ✔️ | | | | | | | ✔️ |

View File

@@ -1,5 +1,13 @@
## User Guide for Local Directory Scanning and File Processing
### Whilst Pipelines are in alpha...
You must enable this alpha functionality by setting
```yaml
system:
enableAlphaFunctionality: true
```
to true, as shown above, in your `/config/settings.yml` file. After restarting Stirling-PDF, both the UI and folder-scanning functionality should be enabled.
### Setting Up Watched Folders:
- Create a folder where you want your files to be monitored. This is your 'watched folder'.
- The default directory for this is `./pipeline/watchedFolders/`
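Conceptually, a watched folder is simply a directory the application monitors for new files, running the configured pipeline on each arrival. The following standalone Java sketch illustrates that idea with `java.nio.file.WatchService`; it is not the actual Stirling-PDF pipeline code, and the printed message stands in for the real processing step.

```java
import java.io.IOException;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardWatchEventKinds;
import java.nio.file.WatchEvent;
import java.nio.file.WatchKey;
import java.nio.file.WatchService;

public class WatchedFolderExample {
    public static void main(String[] args) throws IOException, InterruptedException {
        // Default watched-folder location mentioned above.
        Path watched = Paths.get("pipeline", "watchedFolders");
        Files.createDirectories(watched);

        try (WatchService watcher = FileSystems.getDefault().newWatchService()) {
            watched.register(watcher, StandardWatchEventKinds.ENTRY_CREATE);
            System.out.println("Watching " + watched.toAbsolutePath());

            while (true) {
                WatchKey key = watcher.take(); // blocks until a file is created
                for (WatchEvent<?> event : key.pollEvents()) {
                    // The real pipeline would apply the configured operations here.
                    System.out.println("New file detected: " + event.context());
                }
                key.reset();
            }
        }
    }
}
```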

View File

@@ -1,6 +1,7 @@
<p align="center"><img src="https://raw.githubusercontent.com/Stirling-Tools/Stirling-PDF/main/docs/stirling.png" width="80" ><br><h1 align="center">Stirling-PDF</h1>
</p>
# How to add new languages to Stirling-PDF
Fork Stirling-PDF and make a new branch out of Main
@@ -8,49 +9,30 @@ Fork Stirling-PDF and make a new branch out of Main
Then add a reference to the language in the navbar by adding a new language entry to the dropdown
https://github.com/Stirling-Tools/Stirling-PDF/blob/main/src/main/resources/templates/fragments/languages.html
and add a flag svg file to
https://github.com/Stirling-Tools/Stirling-PDF/tree/main/src/main/resources/static/images/flags
Any SVG flags are fine, I got most of mine from [here](https://flagicons.lipis.dev/)
If your language isn't represented by a flag, just find whichever closely matches it; for Arabic, for example, I chose Saudi Arabia
For example, to add Polish you would add:
```html
<a class="dropdown-item lang_dropdown-item" href="" data-language-code="pl_PL">
<img src="images/flags/pl.svg" alt="icon" width="20" height="15"> Polski
</a>
```
The data-language-code is the code used to reference the file in the next step.
Start by copying the existing English property file
[https://github.com/Stirling-Tools/Stirling-PDF/blob/main/src/main/resources/messages_en_GB.properties](https://github.com/Stirling-Tools/Stirling-PDF/blob/main/src/main/resources/messages_en_GB.properties)
Copy and rename it to messages_{your data-language-code here}.properties; in the Polish example you would set the name to messages_pl_PL.properties
Then simply translate all property entries within that file and make a PR into main for others to use!
If you do not have a Java IDE, I am happy to verify the changes work once you raise a PR (but won't be able to verify the translations themselves)
## Handling Untranslatable Strings
Sometimes, certain strings in the properties file may not require translation because they are the same in the target language or are universal (like names of protocols, certain terminologies, etc.). To ensure accurate statistics for language progress, these strings should be added to the `ignore_translation.toml` file located in the `scripts` directory. This will exclude them from the translation progress calculations.
For example, if the English string error=Error does not need translation in Polish, add it to the ignore_translation.toml under the Polish section:
```toml
[pl_PL]
ignore = [
"language.direction", # Existing entries
"error" # Add new entries here
]
```
## Add New Translation Tags
- **Important**: If you add any new translation tags, they must first be added to the `messages_en_GB.properties` file. This ensures consistency across all language files.
- New translation tags **must be added** to the `messages_en_GB.properties` file to maintain a reference for other languages.
- After adding the new tags to `messages_en_GB.properties`, add and translate them in the respective language file (e.g., `messages_pl_PL.properties`).
Make sure to place the entry under the correct language section. This helps maintain the accuracy of translation progress statistics and ensures that the translation tool or scripts do not misinterpret the completion rate.

View File

@@ -2,12 +2,12 @@
This document provides instructions on how to add additional language packs for the OCR tab in Stirling-PDF, both inside and outside of Docker.
## My OCR used to work and now doesn't!
The paths for the tessdata locations have changed on newer Docker images; please use ``/usr/share/tessdata`` (other paths should still work for backwards compatibility, but might not)
## My OCR used to work and now doesn't!
Please update your tesseract docker volume path version from 4.00 to 5
## How does the OCR Work
Stirling-PDF uses [OCRmyPDF](https://github.com/ocrmypdf/OCRmyPDF) which in turn uses tesseract for its text recognition.
All credit goes to them for this awesome work!
## Language Packs
@@ -27,7 +27,7 @@ Depending on your requirements, you can choose the appropriate language pack for
#### Docker
If you are using Docker, you need to expose the Tesseract tessdata directory as a volume in order to use the additional language packs.
#### Docker Compose
Modify your `docker-compose.yml` file to include the following volume configuration:

687
LICENSE
View File

@@ -1,21 +1,674 @@
MIT License
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (c) 2024 Stirling Tools
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
Preamble
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
The GNU General Public License is a free, copyleft license for
software and other kinds of works.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
the GNU General Public License is intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
To protect your rights, we need to prevent others from denying you
these rights or asking you to surrender the rights. Therefore, you have
certain responsibilities if you distribute copies of the software, or if
you modify it: responsibilities to respect the freedom of others.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must pass on to the recipients the same
freedoms that you received. You must make sure that they, too, receive
or can get the source code. And you must show them these terms so they
know their rights.
Developers that use the GNU GPL protect your rights with two steps:
(1) assert copyright on the software, and (2) offer you this License
giving you legal permission to copy, distribute and/or modify it.
For the developers' and authors' protection, the GPL clearly explains
that there is no warranty for this free software. For both users' and
authors' sake, the GPL requires that modified versions be marked as
changed, so that their problems will not be attributed erroneously to
authors of previous versions.
Some devices are designed to deny users access to install or run
modified versions of the software inside them, although the manufacturer
can do so. This is fundamentally incompatible with the aim of
protecting users' freedom to change the software. The systematic
pattern of such abuse occurs in the area of products for individuals to
use, which is precisely where it is most unacceptable. Therefore, we
have designed this version of the GPL to prohibit the practice for those
products. If such problems arise substantially in other domains, we
stand ready to extend this provision to those domains in future versions
of the GPL, as needed to protect the freedom of users.
Finally, every program is threatened constantly by software patents.
States should not allow patents to restrict development and use of
software on general-purpose computers, but in those that do, we wish to
avoid the special danger that patents applied to a free program could
make it effectively proprietary. To prevent this, the GPL assures that
patents cannot be used to render the program non-free.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Use with the GNU Affero General Public License.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU Affero General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the special requirements of the GNU Affero General Public License,
section 13, concerning interaction through a network will apply to the
combination as such.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If the program does terminal interaction, make it output a short
notice like this when it starts in an interactive mode:
<program> Copyright (C) <year> <name of author>
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, your program's commands
might be different; for a GUI interface, you would use an "about box".
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU GPL, see
<https://www.gnu.org/licenses/>.
The GNU General Public License does not permit incorporating your program
into proprietary programs. If your program is a subroutine library, you
may consider it more useful to permit linking proprietary applications with
the library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License. But first, please read
<https://www.gnu.org/licenses/why-not-lgpl.html>.
View File
@@ -14,7 +14,7 @@ You could theoretically use a Distrobox/Toolbox, if your Distribution has old or
Install the following software, if not already installed:
- Java 17 or later (21 recommended)
- Java 17 or later
- Gradle 7.0 or later (included within repo so not needed on server)
@@ -42,25 +42,17 @@ For Debian-based systems, you can use the following command:
```bash
sudo apt-get update
sudo apt-get install -y git automake autoconf libtool libleptonica-dev pkg-config zlib1g-dev make g++ openjdk-21-jdk python3 python3-pip
sudo apt-get install -y git automake autoconf libtool libleptonica-dev pkg-config zlib1g-dev make g++ java-17-openjdk python3 python3-pip
```
For Fedora-based systems use this command:
For Fedora-based systems use this command:
```bash
sudo dnf install -y git automake autoconf libtool leptonica-devel pkg-config zlib-devel make gcc-c++ java-21-openjdk python3 python3-pip
```
For non-root users with Nix Package Manager, use the following command:
```bash
nix-channel --update
nix-env -iA nixpkgs.jdk21 nixpkgs.git nixpkgs.python38 nixpkgs.gnumake nixpkgs.libgcc nixpkgs.automake nixpkgs.autoconf nixpkgs.libtool nixpkgs.pkg-config nixpkgs.zlib nixpkgs.leptonica
sudo dnf install -y git automake autoconf libtool leptonica-devel pkg-config zlib-devel make gcc-c++ java-17-openjdk python3 python3-pip
```
### Step 2: Clone and Build jbig2enc (Only required for certain OCR functionality)
For Debian and Fedora, you can build it from source using the following commands:
```bash
mkdir ~/.git
cd ~/.git &&\
@@ -72,13 +64,8 @@ make &&\
sudo make install
```
For Nix, you will face `Leptonica not detected`. Bypass this by installing it directly using the following command:
```bash
nix-env -iA nixpkgs.jbig2enc
```
### Step 3: Install Additional Software
Next we need to install LibreOffice for conversions, ocrmypdf for OCR, and opencv for pattern recognition functionality.
Next we need to install LibreOffice for conversions, ocrmypdf for OCR, and opencv for patern recognition functionality.
Install the following software:
@@ -108,21 +95,14 @@ For Debian-based systems, you can use the following command:
```bash
sudo apt-get install -y libreoffice-writer libreoffice-calc libreoffice-impress unpaper ocrmypdf
pip3 install uno opencv-python-headless unoconv pngquant WeasyPrint --break-system-packages
pip3 install uno opencv-python-headless unoconv pngquant WeasyPrint
```
For Fedora:
```bash
sudo dnf install -y libreoffice-writer libreoffice-calc libreoffice-impress unpaper ocrmypdf
pip3 install uno opencv-python-headless unoconv pngquant WeasyPrint
```
For Nix:
```bash
nix-env -iA nixpkgs.unpaper nixpkgs.libreoffice nixpkgs.ocrmypdf nixpkgs.poppler_utils
pip3 install uno opencv-python-headless unoconv pngquant WeasyPrint
pip3 install uno opencv-python-headless unoconv pngquant WeasyPrint
```
### Step 4: Clone and Build Stirling-PDF
@@ -135,12 +115,13 @@ chmod +x ./gradlew &&\
./gradlew build
```
### Step 5: Move jar to desired location
After the build process, a `.jar` file will be generated in the `build/libs` directory.
You can move this file to a desired location, for example, `/opt/Stirling-PDF/`.
You must also move the Script folder within the Stirling-PDF repo that you have downloaded to this directory.
This folder is required for the python scripts using OpenCV.
This folder is required for the python scripts using OpenCV
```bash
sudo mkdir /opt/Stirling-PDF &&\
@@ -148,25 +129,19 @@ sudo mv ./build/libs/Stirling-PDF-*.jar /opt/Stirling-PDF/ &&\
sudo mv scripts /opt/Stirling-PDF/ &&\
echo "Scripts installed."
```
For non-root users, you can just keep the jar in the main directory of Stirling-PDF using the following command:
```bash
mv ./build/libs/Stirling-PDF-*.jar ./Stirling-PDF-*.jar
```
### Step 6: Other files
#### OCR
If you plan to use the OCR (Optical Character Recognition) functionality, you might need to install additional Tesseract language packs for scanning documents in languages other than English.
##### Installing Language Packs
The easiest option is to use the langpacks provided by your repositories; if you do, skip the other steps.
Easiest is to use the langpacks provided by your repositories. Skip the other steps
Manual:
1. Download the desired language pack(s) by selecting the `.traineddata` file(s) for the language(s) you need.
2. Place the `.traineddata` files in the Tesseract tessdata directory: `/usr/share/tessdata`
3. Please view [OCRmyPDF install guide](https://ocrmypdf.readthedocs.io/en/latest/installation.html) for more info.
3.
Please view [OCRmyPDF install guide](https://ocrmypdf.readthedocs.io/en/latest/installation.html) for more info.
**IMPORTANT:** DO NOT REMOVE EXISTING `eng.traineddata`, IT'S REQUIRED.
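If you take the manual route, here is a minimal sketch (the language code `deu` and the download URL are illustrative; adjust them for the packs you actually need):

```bash
# Hypothetical example: fetch the German pack from the tesseract-ocr/tessdata repository
wget https://github.com/tesseract-ocr/tessdata/raw/main/deu.traineddata
# Place it alongside the existing packs (do not remove eng.traineddata)
sudo mv deu.traineddata /usr/share/tessdata/
```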
For Debian-based systems, install languages with this command:
@@ -196,38 +171,14 @@ dnf search -C tesseract-langpack-
rpm -qa | grep tesseract-langpack | sed 's/tesseract-langpack-//g'
```
Nix:
```bash
nix-env -iA nixpkgs.tesseract
```
**Note:** Nix Package Manager pre-installs almost all the language packs when tesseract is installed.
### Step 7: Run Stirling-PDF
Those who moved the jar to the root-level directory (`/opt/Stirling-PDF/`) can run the application with one of the following commands:
```bash
./gradlew bootRun
or
java -jar /opt/Stirling-PDF/Stirling-PDF-*.jar
```
Since libreoffice, soffice, and conversion tools have their dbus_tmp_dir set as `dbus_tmp_dir="/run/user/$(id -u)/libreoffice-dbus"`, you might get the following error when using their endpoints:
```
[Thread-7] INFO s.s.SPDF.utils.ProcessExecutor - mkdir: cannot create directory /run/user/1501: Permission denied
```
To resolve this, before starting Stirling-PDF, set the environment variable to a directory you have write access to, using the following commands:
```bash
mkdir temp
export DBUS_SESSION_BUS_ADDRESS="unix:path=./temp"
./gradlew bootRun
or
java -jar ./Stirling-PDF-*.jar
```
### Step 8: Adding a Desktop icon
This will add a modified Appstarter to your Appmenu.
@@ -251,19 +202,7 @@ EOF
Note: Currently the app will run in the background until manually closed.
### Optional: Changing the host and port of the application:
To override the default configuration, you can add the following to the `/.git/Stirling-PDF/configs/custom_settings.yml` file:
```bash
server:
host: 0.0.0.0
port: 3000
```
**Note:** This file is created after the first application launch. To have it before that, you can create the directory and add the file yourself.
### Optional: Run Stirling-PDF as a service (requires root).
### Optional: Run Stirling-PDF as a service
First create a .env file, where you can store environment variables:
```
@@ -300,7 +239,6 @@ WantedBy=multi-user.target
```
Notify systemd that it has to rebuild its internal service database (you have to run this command every time you make a change in the service file):
```
sudo systemctl daemon-reload
```
@@ -326,10 +264,10 @@ sudo systemctl restart stirlingpdf.service
Remember to set the necessary environment variables before running the project if you want to customize the application; the list can be seen in the main readme.
You can do this in the terminal by using the `export` command or -D argument to java -jar command:
You can do this in the terminal by using the `export` command or -D arguements to java -jar command:
```bash
export APP_HOME_NAME="Stirling PDF"
or
-DAPP_HOME_NAME="Stirling PDF"
-DAPP_HOME_NAME="Stirling PDF"
```
View File
@@ -1,6 +1,13 @@
# Pipeline Configuration and Usage Tutorial
- Configure the pipeline config file and the input files to run against it
- For reuse, download the config file and re-upload it when needed, or place it in /pipeline/defaultWebUIConfigs/ to auto-load in the web UI for all users (a re-use example is sketched below)
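For example, a minimal sketch of re-using a previously downloaded config (the file name is illustrative; the destination is the folder mentioned above, mounted or local depending on your setup):

```bash
# Copy a saved pipeline config so the web UI auto-loads it for all users
cp my-pipeline-config.json /pipeline/defaultWebUIConfigs/
```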
## Whilst Pipelines are in alpha...
You must enable this alpha functionality by setting
```yaml
system:
enableAlphaFunctionality: true
```
to true, as shown above, in your `/config/settings.yml` file. After restarting Stirling-PDF you should see both the UI and folder scanning enabled.
## Steps to Configure and Use Your Pipeline
@@ -33,12 +40,3 @@
10. **Note on Web UI Limitations**
- The current web UI version does not support operations that require multiple different types of inputs, such as adding a separate image to a PDF.
### Current Limitations
- Cannot have more than one of the same operation
- Cannot input additional files via UI
- All files and operations run in serial mode
README.md
View File
@@ -1,36 +1,34 @@
<p align="center"><img src="https://raw.githubusercontent.com/Stirling-Tools/Stirling-PDF/main/docs/stirling.png" width="80" ></p>
<h1 align="center">Stirling-PDF</h1>
<p align="center"><img src="https://raw.githubusercontent.com/Stirling-Tools/Stirling-PDF/main/docs/stirling.png" width="80" ><br><h1 align="center">Stirling-PDF</h1>
</p>
[![Docker Pulls](https://img.shields.io/docker/pulls/frooodle/s-pdf)](https://hub.docker.com/r/frooodle/s-pdf)
[![Discord](https://img.shields.io/discord/1068636748814483718?label=Discord)](https://discord.gg/Cn8pWhQRxZ)
[![Docker Image Version (tag latest semver)](https://img.shields.io/docker/v/frooodle/s-pdf/latest)](https://github.com/Stirling-Tools/Stirling-PDF/)
[![GitHub Repo stars](https://img.shields.io/github/stars/stirling-tools/stirling-pdf?style=social)](https://github.com/Stirling-Tools/stirling-pdf)
[![Paypal Donate](https://img.shields.io/badge/Paypal%20Donate-yellow?style=flat&logo=paypal)](https://www.paypal.com/paypalme/froodleplex)
[![Github Sponser](https://img.shields.io/badge/Github%20Sponsor-yellow?style=flat&logo=github)](https://github.com/sponsors/Frooodle)
[![Deploy to DO](https://www.deploytodo.com/do-btn-blue.svg)](https://cloud.digitalocean.com/apps/new?repo=https://github.com/Stirling-Tools/Stirling-PDF/tree/digitalOcean&refcode=c3210994b1af)
[<img src="https://www.ssdnodes.com/wp-content/uploads/2023/11/footer-logo.svg" alt="Name" height="40">](https://www.ssdnodes.com/manage/aff.php?aff=2216&register=true)
This is a robust, locally hosted web-based PDF manipulation tool using Docker. It enables you to carry out various operations on PDF files, including splitting, merging, converting, reorganizing, adding images, rotating, compressing, and more. This locally hosted web application has evolved to encompass a comprehensive set of features, addressing all your PDF requirements.
This is a powerful locally hosted web based PDF manipulation tool using docker that allows you to perform various operations on PDF files, such as splitting merging, converting, reorganizing, adding images, rotating, compressing, and more. This locally hosted web application started as a 100% ChatGPT-made application and has evolved to include a wide range of features to handle all your PDF needs.
Stirling PDF does not initiate any outbound calls for record-keeping or tracking purposes.
Stirling PDF makes no outbound calls for any record keeping or tracking.
All files and PDFs exist either exclusively on the client side, reside in server memory only during task execution, or temporarily reside in a file solely for the execution of the task. Any file downloaded by the user will have been deleted from the server by that point.
![stirling-home](images/stirling-home.jpg)
![stirling-home](images/stirling-home.png)
## Features
- Dark mode support.
- Custom download options
- Custom download options (see [here](https://github.com/Stirling-Tools/Stirling-PDF/blob/main/images/settings.png) for example)
- Parallel file processing and downloads
- Custom 'Pipelines' to run multiple features in a queue
- API for integration with external scripts
- Optional Login and Authentication support (see [here](https://github.com/Stirling-Tools/Stirling-PDF/tree/main#login-authentication) for documentation)
- Database Backup and Import (see [here](https://github.com/Stirling-Tools/Stirling-PDF/blob/main/DATABASE.md) for documentation)
## **PDF Features**
### **Page Operations**
- View and modify PDFs - View multi-page PDFs with custom viewing, sorting and searching. Plus on-page edit features like annotating, drawing and adding text and images. (Using PDF.js with Joxit and Liberation.Liberation fonts)
- Full interactive GUI for merging/splitting/rotating/moving PDFs and their pages.
- Merge multiple PDFs together into a single resultant file.
@@ -45,10 +43,8 @@ All files and PDFs exist either exclusively on the client side, reside in server
- Auto Split PDF (With physically scanned page dividers).
- Extract page(s).
- Convert PDF to a single page.
- Overlay PDFs on top of each other
### **Conversion Operations**
- Convert PDFs to and from images.
- Convert any common file to PDF (using LibreOffice).
- Convert PDF to Word/Powerpoint/Others (using LibreOffice).
@@ -57,7 +53,6 @@ All files and PDFs exist either exclusively on the client side, reside in server
- Markdown to PDF.
### **Security & Permissions**
- Add and remove passwords.
- Change/set PDF Permissions.
- Add watermark(s).
@@ -66,7 +61,6 @@ All files and PDFs exist either exclusively on the client side, reside in server
- Auto-redact text.
### **Other Operations**
- Add/Generate/Write signatures.
- Repair PDFs.
- Detect and remove blank pages.
@@ -82,14 +76,12 @@ All files and PDFs exist either exclusively on the client side, reside in server
- Edit metadata.
- Flatten PDFs.
- Get all information on a PDF to view or export as JSON.
- Show/Detect embedded Javascript
For an overview of the tasks and the technology each uses please view [Endpoint-groups.md](https://github.com/Stirling-Tools/Stirling-PDF/blob/main/Endpoint-groups.md)
Demo of the app is available [here](https://stirlingpdf.io).
Demo of the app is available [here](https://stirlingpdf.io). username: demo, password: demo
## Technologies used
- Spring Boot + Thymeleaf
- [PDFBox](https://github.com/apache/pdfbox/tree/trunk)
- [LibreOffice](https://www.libreoffice.org/discover/libreoffice/) for advanced conversions
@@ -100,48 +92,37 @@ Demo of the app is available [here](https://stirlingpdf.io).
- [PDF-LIB.js](https://github.com/Hopding/pdf-lib)
## How to use
### Windows
For windows users download the latest Stirling-PDF.exe from our [release](https://github.com/Stirling-Tools/Stirling-PDF/releases) section or by clicking [here](https://github.com/Stirling-Tools/Stirling-PDF/releases/latest/download/Stirling-PDF.exe)
### Locally
Please view https://github.com/Stirling-Tools/Stirling-PDF/blob/main/LocalRunGuide.md
### Docker / Podman
https://hub.docker.com/r/frooodle/s-pdf
Stirling PDF has 3 different versions, a Full version and ultra-Lite version as well as a 'Fat' version. Depending on the types of features you use you may want a smaller image to save on space.
Stirling PDF has 3 different versions, a Full version, Lite, and ultra-Lite. Depending on the types of features you use you may want a smaller image to save on space.
To see what the different versions offer please look at our [version mapping](https://github.com/Stirling-Tools/Stirling-PDF/blob/main/Version-groups.md)
For people who aren't concerned about space optimization, just use the latest tag.
![Docker Image Size (tag)](https://img.shields.io/docker/image-size/frooodle/s-pdf/latest?label=Stirling-PDF%20Full)
![Docker Image Size (tag)](https://img.shields.io/docker/image-size/frooodle/s-pdf/latest-lite?label=Stirling-PDF%20Lite)
![Docker Image Size (tag)](https://img.shields.io/docker/image-size/frooodle/s-pdf/latest-ultra-lite?label=Stirling-PDF%20Ultra-Lite)
![Docker Image Size (tag)](https://img.shields.io/docker/image-size/frooodle/s-pdf/latest-fat?label=Stirling-PDF%20Fat)
Please note that in the examples below you may need to change the volume paths as needed; the current examples mount them from the current working directory,
e.g. ``./extraConfigs:/configs`` could instead be ``/opt/stirlingpdf/extraConfigs:/configs``
### Docker Run
Docker Run
```bash
docker run -d \
-p 8080:8080 \
-v ./trainingData:/usr/share/tessdata \
-v ./extraConfigs:/configs \
-v ./logs:/logs \
-v /location/of/trainingData:/usr/share/tessdata \
-v /location/of/extraConfigs:/configs \
-v /location/of/logs:/logs \
-e DOCKER_ENABLE_SECURITY=false \
-e INSTALL_BOOK_AND_ADVANCED_HTML_OPS=false \
-e LANGS=en_GB \
--name stirling-pdf \
frooodle/s-pdf:latest
Can also add these for customisation but are not required
-v /location/of/customFiles:/customFiles \
```
### Docker Compose
Docker Compose
```yaml
version: '3.3'
services:
@@ -150,179 +131,114 @@ services:
ports:
- '8080:8080'
volumes:
- ./trainingData:/usr/share/tessdata #Required for extra OCR languages
- ./extraConfigs:/configs
# - ./customFiles:/customFiles/
# - ./logs:/logs/
- /location/of/trainingData:/usr/share/tessdata #Required for extra OCR languages
- /location/of/extraConfigs:/configs
# - /location/of/customFiles:/customFiles/
# - /location/of/logs:/logs/
environment:
- DOCKER_ENABLE_SECURITY=false
- INSTALL_BOOK_AND_ADVANCED_HTML_OPS=false
- LANGS=en_GB
```
Note: Podman is CLI-compatible with Docker, so simply replace "docker" with "podman".
## Enable OCR/Compression feature
Please view https://github.com/Stirling-Tools/Stirling-PDF/blob/main/HowToUseOCR.md
## Supported Languages
Stirling PDF currently supports 38 languages!
| Language | Progress |
| ------------------------------------------- | -------------------------------------- |
| Arabic (العربية) (ar_AR) | ![99%](https://geps.dev/progress/99) |
| Basque (Euskara) (eu_ES) | ![60%](https://geps.dev/progress/60) |
| Bulgarian (Български) (bg_BG) | ![91%](https://geps.dev/progress/91) |
| Catalan (Català) (ca_CA) | ![47%](https://geps.dev/progress/47) |
| Croatian (Hrvatski) (hr_HR) | ![91%](https://geps.dev/progress/91) |
| Czech (Česky) (cs_CZ) | ![87%](https://geps.dev/progress/87) |
| Danish (Dansk) (da_DK) | ![96%](https://geps.dev/progress/96) |
| Dutch (Nederlands) (nl_NL) | ![93%](https://geps.dev/progress/93) |
| English (English) (en_GB) | ![100%](https://geps.dev/progress/100) |
| English (US) (en_US) | ![100%](https://geps.dev/progress/100) |
| French (Français) (fr_FR) | ![90%](https://geps.dev/progress/90) |
| German (Deutsch) (de_DE) | ![98%](https://geps.dev/progress/98) |
| Greek (Ελληνικά) (el_GR) | ![79%](https://geps.dev/progress/79) |
| Hindi (हिंदी) (hi_IN) | ![76%](https://geps.dev/progress/76) |
| Hungarian (Magyar) (hu_HU) | ![73%](https://geps.dev/progress/73) |
| Indonesia (Bahasa Indonesia) (id_ID) | ![74%](https://geps.dev/progress/74) |
| Irish (Gaeilge) (ga_IE) | ![95%](https://geps.dev/progress/95) |
| Italian (Italiano) (it_IT) | ![99%](https://geps.dev/progress/99) |
| Japanese (日本語) (ja_JP) | ![89%](https://geps.dev/progress/89) |
| Korean (한국어) (ko_KR) | ![81%](https://geps.dev/progress/81) |
| Norwegian (Norsk) (no_NB) | ![95%](https://geps.dev/progress/95) |
| Polish (Polski) (pl_PL) | ![89%](https://geps.dev/progress/89) |
| Portuguese (Português) (pt_PT) | ![76%](https://geps.dev/progress/76) |
| Portuguese Brazilian (Português) (pt_BR) | ![98%](https://geps.dev/progress/98) |
| Romanian (Română) (ro_RO) | ![97%](https://geps.dev/progress/97) |
| Russian (Русский) (ru_RU) | ![81%](https://geps.dev/progress/81) |
| Serbian Latin alphabet (Srpski) (sr_LATN_RS) | ![76%](https://geps.dev/progress/76) |
| Simplified Chinese (简体中文) (zh_CN) | ![98%](https://geps.dev/progress/98) |
| Slovakian (Slovensky) (sk_SK) | ![89%](https://geps.dev/progress/89) |
| Spanish (Español) (es_ES) | ![98%](https://geps.dev/progress/98) |
| Swedish (Svenska) (sv_SE) | ![97%](https://geps.dev/progress/97) |
| Thai (ไทย) (th_TH) | ![96%](https://geps.dev/progress/96) |
| Traditional Chinese (繁體中文) (zh_TW) | ![95%](https://geps.dev/progress/95) |
| Turkish (Türkçe) (tr_TR) | ![96%](https://geps.dev/progress/96) |
| Ukrainian (Українська) (uk_UA) | ![87%](https://geps.dev/progress/87) |
| Vietnamese (Tiếng Việt) (vi_VN) | ![96%](https://geps.dev/progress/96) |
Stirling PDF currently supports 26!
- English (English) (en_GB)
- English (US) (en_US)
- Arabic (العربية) (ar_AR)
- German (Deutsch) (de_DE)
- French (Français) (fr_FR)
- Spanish (Español) (es_ES)
- Simplified Chinese (简体中文) (zh_CN)
- Traditional Chinese (繁體中文) (zh_TW)
- Catalan (Català) (ca_CA)
- Italian (Italiano) (it_IT)
- Swedish (Svenska) (sv_SE)
- Polish (Polski) (pl_PL)
- Romanian (Română) (ro_RO)
- Korean (한국어) (ko_KR)
- Portuguese Brazilian (Português) (pt_BR)
- Russian (Русский) (ru_RU)
- Basque (Euskara) (eu_ES)
- Japanese (日本語) (ja_JP)
- Dutch (Nederlands) (nl_NL)
- Greek (el_GR)
- Turkish (Türkçe) (tr_TR)
- Indonesia (Bahasa Indonesia) (id_ID)
- Hindi (हिंदी) (hi_IN)
- Hungarian (Magyar) (hu_HU)
- Bulgarian (Български) (bg_BG)
- Sebian Latin alphabet (Srpski) (sr-Latn-RS)
## Contributing (creating issues, translations, fixing bugs, etc.)
Please see our [Contributing Guide](CONTRIBUTING.md)!
## Customisation
Stirling PDF allows easy customization of the app.
Includes things like
- Custom application name
- Custom slogans, icons, HTML, images, CSS etc (via file overrides)
- Custom slogans, icons, images, and even custom HTML (via file overrides)
There are two options for this: either use the generated settings file ``settings.yml``, which is located in the ``/configs`` directory and follows standard YAML formatting, or use environment variables, which override the settings file.
For example, in the settings.yml you have
```yaml
security:
enableLogin: 'true'
system:
defaultLocale: 'en-US'
```
To have this via an environment variable you would have ``SECURITY_ENABLELOGIN``
To have this via an environment variable you would have ``SYSTEM_DEFAULTLOCALE``
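For instance, a minimal sketch of supplying such an override to the Docker container (image tag and value are illustrative):

```bash
# Nested YAML keys become environment variables by joining the segments with underscores
docker run -d \
  -p 8080:8080 \
  -e SYSTEM_DEFAULTLOCALE=de-DE \
  --name stirling-pdf \
  frooodle/s-pdf:latest
```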
The current list of settings is:
```yaml
security:
enableLogin: false # set to 'true' to enable login
csrfDisabled: true # Set to 'true' to disable CSRF protection (not recommended for production)
loginAttemptCount: 5 # lock user account after 5 tries; when using e.g. Fail2Ban you can deactivate the function with -1
loginResetTimeMinutes: 120 # lock account for 2 hours after x attempts
loginMethod: all # 'all' (Login Username/Password and OAuth2[must be enabled and configured]), 'normal'(only Login with Username/Password) or 'oauth2'(only Login with OAuth2)
initialLogin:
username: '' # Initial username for the first login
password: '' # Initial password for the first login
oauth2:
enabled: false # set to 'true' to enable login (Note: enableLogin must also be 'true' for this to work)
client:
keycloak:
issuer: '' # URL of the Keycloak realm's OpenID Connect Discovery endpoint
clientId: '' # Client ID for Keycloak OAuth2
clientSecret: '' # Client Secret for Keycloak OAuth2
scopes: openid, profile, email # Scopes for Keycloak OAuth2
useAsUsername: preferred_username # Field to use as the username for Keycloak OAuth2
google:
clientId: '' # Client ID for Google OAuth2
clientSecret: '' # Client Secret for Google OAuth2
scopes: https://www.googleapis.com/auth/userinfo.email, https://www.googleapis.com/auth/userinfo.profile # Scopes for Google OAuth2
useAsUsername: email # Field to use as the username for Google OAuth2
github:
clientId: '' # Client ID for GitHub OAuth2
clientSecret: '' # Client Secret for GitHub OAuth2
scopes: read:user # Scope for GitHub OAuth2
useAsUsername: login # Field to use as the username for GitHub OAuth2
issuer: '' # set to any provider that supports OpenID Connect Discovery (/.well-known/openid-configuration) end-point
clientId: '' # Client ID from your provider
clientSecret: '' # Client Secret from your provider
autoCreateUser: false # set to 'true' to allow auto-creation of non-existing users
blockRegistration: false # set to 'true' to deny login with SSO without prior registration by an admin
useAsUsername: email # Default is 'email'; custom fields can be used as the username
scopes: openid, profile, email # Specify the scopes for which the application will request permissions
provider: google # Set this to your OAuth provider's name, e.g., 'google' or 'keycloak'
csrfDisabled: true
system:
defaultLocale: 'en-US' # Set the default language (e.g. 'de-DE', 'fr-FR', etc)
googlevisibility: false # 'true' to allow Google visibility (via robots.txt), 'false' to disallow
enableAlphaFunctionality: false # Set to enable functionality which might need more testing before it fully goes live (This feature might make no changes)
showUpdate: true # see when a new update is available
showUpdateOnlyAdmin: false # Only admins can see when a new update is available, depending on showUpdate it must be set to 'true'
customHTMLFiles: false # enable to have files placed in /customFiles/templates override the existing template html files
customStaticFilePath: '/customFiles/static/' # Directory path for custom static files
ui:
appName: '' # Application's visible name
homeDescription: '' # Short description or tagline shown on homepage.
appNameNavbar: '' # Name displayed on the navigation bar
#ui:
# appName: exampleAppName # Application's visible name
# homeDescription: I am a description # Short description or tagline shown on homepage.
# appNameNavbar: navbarName # Name displayed on the navigation bar
endpoints:
toRemove: [] # List endpoints to disable (e.g. ['img-to-pdf', 'remove-pages'])
groupsToRemove: [] # List groups to disable (e.g. ['LibreOffice'])
metrics:
enabled: true # 'true' to enable Info APIs (`/api/*`) endpoints, 'false' to disable
enabled: true # 'true' to enable Info APIs endpoints (view http://localhost:8080/swagger-ui/index.html#/API to learn more), 'false' to disable
```
There is an additional config file ``/configs/custom_settings.yml`` where users familiar with Java and Spring application.properties can input their own settings on top of Stirling-PDF's existing ones
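As a minimal sketch (the property shown is a standard Spring Boot setting chosen purely for illustration), a `custom_settings.yml` entry might look like:

```yaml
# Standard Spring Boot property layered on top of Stirling-PDF's own settings
server:
  servlet:
    session:
      timeout: 30m
```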
### Extra notes
- Endpoints. Currently, ENDPOINTS_TO_REMOVE and GROUPS_TO_REMOVE can include comma-separated lists of endpoints and groups to disable. For example, ENDPOINTS_TO_REMOVE=img-to-pdf,remove-pages would disable both img-to-pdf and remove-pages, and GROUPS_TO_REMOVE=LibreOffice would disable all features that use LibreOffice. You can see a list of all endpoints and groups [here](https://github.com/Stirling-Tools/Stirling-PDF/blob/main/Endpoint-groups.md); a settings.yml sketch follows this list
- customStaticFilePath. Customise static files such as the app logo by placing files in the /customFiles/static/ directory. An example of customising the app logo is placing /customFiles/static/favicon.svg to override the current SVG. This can be used to change any images, icons, CSS, fonts, JS etc in Stirling-PDF
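A short settings.yml sketch of the endpoint options above (the endpoint and group names are taken from the examples in this list):

```yaml
endpoints:
  toRemove: ['img-to-pdf', 'remove-pages'] # disable individual endpoints
  groupsToRemove: ['LibreOffice']          # disable everything that uses LibreOffice
```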
### Environment only parameters
- ``SYSTEM_ROOTURIPATH``, e.g. set to ``/pdf-app`` to set the application's root URI to ``localhost:8080/pdf-app``
- ``SYSTEM_CONNECTIONTIMEOUTMINUTES`` to set custom connection timeout values
- ``DOCKER_ENABLE_SECURITY`` to tell Docker to download the security jar (required to be true for auth login)
- ``INSTALL_BOOK_AND_ADVANCED_HTML_OPS`` to download Calibre onto Stirling-PDF, enabling PDF to/from book and advanced HTML conversion
- ``LANGS`` to define custom font libraries to install for use in document conversions (see the example run after this list)
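A minimal run sketch combining these environment-only parameters (all values are illustrative):

```bash
docker run -d \
  -p 8080:8080 \
  -e DOCKER_ENABLE_SECURITY=false \
  -e INSTALL_BOOK_AND_ADVANCED_HTML_OPS=false \
  -e SYSTEM_ROOTURIPATH=/pdf-app \
  -e SYSTEM_CONNECTIONTIMEOUTMINUTES=5 \
  -e LANGS=en_GB \
  --name stirling-pdf \
  frooodle/s-pdf:latest
```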
## API
For those wanting to use Stirling-PDF's backend API to link with their own custom scripting to edit PDFs, you can view all existing API documentation
[here](https://app.swaggerhub.com/apis-docs/Stirling-Tools/Stirling-PDF/) or navigate to /swagger-ui/index.html of your Stirling-PDF instance for your version's documentation (or by following the API button in the settings of Stirling-PDF)
## Login authentication
![stirling-login](images/login-light.png)
### Prerequisites
### Prerequisites:
- User must have the folder ./configs mounted as a volume within Docker so that it is retained during updates.
- Docker users must download the security jar version by setting ``DOCKER_ENABLE_SECURITY`` to ``true`` in environment variables.
- Docker uses must download the security jar version by setting ``DOCKER_ENABLE_SECURITY`` to ``true`` in environment variables.
- Then either enable login via the settings.yml file or via setting ``SECURITY_ENABLE_LOGIN`` to ``true`` (an example run follows this list)
- Now the initial user will be generated with username ``admin`` and password ``stirling``. On login you will be forced to change the password to a new one. You can also use the environment variables ``SECURITY_INITIALLOGIN_USERNAME`` and ``SECURITY_INITIALLOGIN_PASSWORD`` to set your own straight away (Recommended to remove them after user creation).
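A minimal run sketch for the prerequisites above (paths and image tag are illustrative; the initial-login variables are optional):

```bash
docker run -d \
  -p 8080:8080 \
  -v ./configs:/configs \
  -e DOCKER_ENABLE_SECURITY=true \
  -e SECURITY_ENABLELOGIN=true \
  -e SECURITY_INITIALLOGIN_USERNAME=admin2 \
  -e SECURITY_INITIALLOGIN_PASSWORD=changeme \
  --name stirling-pdf \
  frooodle/s-pdf:latest
```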
@@ -336,22 +252,20 @@ To add new users go to the bottom of Account settings and hit 'Admin Settings',
For API usage you must provide a header with 'X-API-Key' and the associated API key for that user.
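For example, a minimal sketch of an authenticated API call (the endpoint and form fields are hypothetical and shown for illustration only; check /swagger-ui/index.html on your instance for the real paths and parameters):

```bash
curl -X POST "http://localhost:8080/api/v1/general/rotate-pdf" \
  -H "X-API-Key: YOUR_API_KEY" \
  -F "fileInput=@input.pdf" \
  -F "angle=90" \
  -o rotated.pdf
```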
## FAQ
### Q1: What are your planned features?
- Progress bar/Tracking
- Full custom logic pipelines to combine multiple operations together.
- Folder support with auto scanning to perform operations on
- Redact text (Via UI not just automated way)
- Add Forms
- Multi-page layout (stitch PDF pages together) supporting x rows, y columns and custom page sizing
- Fill forms manually or automatically
- Fill forms mannual and automatic
### Q2: Why is my application downloading .htm files?
This is an issue commonly caused by your NGINX configuration. The default file upload size for NGINX is 1MB; you need to add the following in your NGINX sites-available file: ``client_max_body_size SIZE;``, where "SIZE" is for example 50M for 50MB files.
### Q3: Why is my download timing out?
NGINX has timeout values by default, so if you are running Stirling-PDF behind NGINX, you may need to set a timeout value, such as adding the config ``proxy_read_timeout 3600;``
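A minimal sketch of the relevant NGINX directives for both answers above (values are illustrative; place them in the appropriate `server`/`location` block of your site configuration):

```nginx
server {
    client_max_body_size 50M;   # raise the 1MB default upload limit (Q2)
    proxy_read_timeout 3600;    # allow long-running conversions/downloads (Q3)
}
```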
View File
@@ -1,56 +1,64 @@
All versions in a Docker environment can download Calibre as an optional extra at runtime to support `book-to-pdf` and `pdf-to-book` using the parameter ``INSTALL_BOOK_AND_ADVANCED_HTML_OPS``.
The 'Fat' container contains everything found in 'Full' plus the security jar, along with this Calibre install.
|Technology | Ultra-Lite | Lite | Full |
|----------------|:----------:|:----:|:----:|
| Java | ✔️ | ✔️ | ✔️ |
| JavaScript | ✔️ | ✔️ | ✔️ |
| Libre | | ✔️ | ✔️ |
| Python | | | ✔️ |
| OpenCV | | | ✔️ |
| OCRmyPDF | | | ✔️ |
Technology | Ultra-Lite | Full |
| ---------- | :--------: | :---: |
| Java | ✔️ | ✔️ |
| JavaScript | ✔️ | ✔️ |
| Libre | | ✔️ |
| Python | | ✔️ |
| OpenCV | | ✔️ |
| OCRmyPDF | | ✔️ |
| Operation | Ultra-Lite | Full |
| ---------------------- | ---------- | ---- |
| add-page-numbers | ✔️ | ✔️ |
| add-password | ✔️ | ✔️ |
| add-image | ✔️ | ✔️ |
| add-watermark | ✔️ | ✔️ |
| adjust-contrast | ✔️ | ✔️ |
| auto-split-pdf | ✔️ | ✔️ |
| auto-redact | ✔️ | ✔️ |
| auto-rename | ✔️ | ✔️ |
| cert-sign | ✔️ | ✔️ |
| remove-cert-sign | ✔️ | ✔️ |
| crop | ✔️ | ✔️ |
| change-metadata | ✔️ | ✔️ |
| change-permissions | ✔️ | ✔️ |
| compare | ✔️ | ✔️ |
| extract-page | ✔️ | ✔️ |
| extract-images | ✔️ | ✔️ |
| flatten | ✔️ | ✔️ |
| get-info-on-pdf | ✔️ | ✔️ |
| img-to-pdf | ✔️ | ✔️ |
| markdown-to-pdf | ✔️ | ✔️ |
| merge-pdfs | ✔️ | ✔️ |
| multi-page-layout | ✔️ | ✔️ |
| overlay-pdf | ✔️ | ✔️ |
| pdf-organizer | ✔️ | ✔️ |
| pdf-to-csv | ✔️ | ✔️ |
| pdf-to-img | ✔️ | ✔️ |
| pdf-to-single-page | ✔️ | ✔️ |
| remove-pages | ✔️ | ✔️ |
| remove-password | ✔️ | ✔️ |
| rotate-pdf | ✔️ | ✔️ |
| sanitize-pdf | ✔️ | ✔️ |
| scale-pages | ✔️ | ✔️ |
| sign | ✔️ | ✔️ |
| show-javascript | ✔️ | ✔️ |
| split-by-size-or-count | ✔️ | ✔️ |
| split-pdf-by-sections | ✔️ | ✔️ |
| split-pdfs | ✔️ | ✔️ |
| compress-pdf | | ✔️ |
| extract-image-scans | | ✔️ |
| ocr-pdf | | ✔️ |
| pdf-to-pdfa | | ✔️ |
| remove-blanks | | ✔️ |
Operation | Ultra-Lite | Lite | Full
--------------------|------------|------|-----
add-page-numbers | ✔️ | ✔️ | ✔️
add-password | ✔️ | ✔️ | ✔️
add-image | ✔️ | ✔️ | ✔️
add-watermark | ✔️ | ✔️ | ✔️
adjust-contrast | ✔️ | ✔️ | ✔️
auto-split-pdf | ✔️ | ✔️ | ✔️
auto-redact | ✔️ | ✔️ | ✔️
auto-rename | ✔️ | ✔️ | ✔️
cert-sign | ✔️ | ✔️ | ✔️
crop | ✔️ | ✔️ | ✔️
change-metadata | ✔️ | ✔️ | ✔️
change-permissions | ✔️ | ✔️ | ✔️
compare | ✔️ | ✔️ | ✔️
extract-page | ✔️ | ✔️ | ✔️
extract-images | ✔️ | ✔️ | ✔️
flatten | ✔️ | ✔️ | ✔️
get-info-on-pdf | ✔️ | ✔️ | ✔️
img-to-pdf | ✔️ | ✔️ | ✔️
markdown-to-pdf | ✔️ | ✔️ | ✔️
merge-pdfs | ✔️ | ✔️ | ✔️
multi-page-layout | ✔️ | ✔️ | ✔️
overlay-pdf | ✔️ | ✔️ | ✔️
pdf-organizer | ✔️ | ✔️ | ✔️
pdf-to-csv | ✔️ | ✔️ | ✔️
pdf-to-img | ✔️ | ✔️ | ✔️
pdf-to-single-page | ✔️ | ✔️ | ✔️
remove-pages | ✔️ | ✔️ | ✔️
remove-password | ✔️ | ✔️ | ✔️
rotate-pdf | ✔️ | ✔️ | ✔️
sanitize-pdf | ✔️ | ✔️ | ✔️
scale-pages | ✔️ | ✔️ | ✔️
sign | ✔️ | ✔️ | ✔️
show-javascript | ✔️ | ✔️ | ✔️
split-by-size-or-count | ✔️ | ✔️ | ✔️
split-pdf-by-sections | ✔️ | ✔️ | ✔️
split-pdfs | ✔️ | ✔️ | ✔️
file-to-pdf | | ✔️ | ✔️
pdf-to-html | | ✔️ | ✔️
pdf-to-presentation | | ✔️ | ✔️
pdf-to-text | | ✔️ | ✔️
pdf-to-word | | ✔️ | ✔️
pdf-to-xml | | ✔️ | ✔️
repair | | ✔️ | ✔️
xlsx-to-pdf | | ✔️ | ✔️
compress-pdf | | | ✔️
extract-image-scans | | | ✔️
ocr-pdf | | | ✔️
pdf-to-pdfa | | | ✔️
remove-blanks | | | ✔️
View File
@@ -1,39 +1,26 @@
plugins {
id "java"
id "org.springframework.boot" version "3.3.3"
id "io.spring.dependency-management" version "1.1.6"
id "org.springdoc.openapi-gradle-plugin" version "1.8.0"
id 'java'
id 'org.springframework.boot' version '3.2.2'
id 'io.spring.dependency-management' version '1.1.3'
id 'org.springdoc.openapi-gradle-plugin' version '1.8.0'
id "io.swagger.swaggerhub" version "1.3.2"
id "edu.sc.seis.launch4j" version "3.0.6"
id "com.diffplug.spotless" version "6.25.0"
id "com.github.jk1.dependency-license-report" version "2.9"
//id "nebula.lint" version "19.0.3"
id 'edu.sc.seis.launch4j' version '3.0.5'
id 'com.diffplug.spotless' version '6.25.0'
id 'com.github.jk1.dependency-license-report' version '2.5'
}
import com.github.jk1.license.render.*
ext {
springBootVersion = "3.3.3"
pdfboxVersion = "3.0.3"
logbackVersion = "1.5.7"
imageioVersion = "3.11.0"
lombokVersion = "1.18.34"
bouncycastleVersion = "1.78.1"
}
group = "stirling.software"
version = "0.29.0"
java {
// 17 is lowest but we support and recommend 21
sourceCompatibility = JavaVersion.VERSION_17
}
group = 'stirling.software'
version = '0.21.0'
sourceCompatibility = '17'
repositories {
mavenCentral()
maven { url "https://jitpack.io" }
}
licenseReport {
renderers = [new JsonReportRenderer()]
}
@@ -41,19 +28,15 @@ licenseReport {
sourceSets {
main {
java {
if (System.getenv("DOCKER_ENABLE_SECURITY") == "false") {
exclude "stirling/software/SPDF/config/security/**"
exclude "stirling/software/SPDF/controller/api/UserController.java"
exclude "stirling/software/SPDF/controller/api/DatabaseController.java"
exclude "stirling/software/SPDF/controller/web/AccountWebController.java"
exclude "stirling/software/SPDF/controller/web/DatabaseWebController.java"
exclude "stirling/software/SPDF/model/ApiKeyAuthenticationToken.java"
exclude "stirling/software/SPDF/model/AttemptCounter.java"
exclude "stirling/software/SPDF/model/Authority.java"
exclude "stirling/software/SPDF/model/PersistentLogin.java"
exclude "stirling/software/SPDF/model/SessionEntity.java"
exclude "stirling/software/SPDF/model/User.java"
exclude "stirling/software/SPDF/repository/**"
if (System.getenv('DOCKER_ENABLE_SECURITY') == 'false') {
exclude 'stirling/software/SPDF/config/security/**'
exclude 'stirling/software/SPDF/controller/api/UserController.java'
exclude 'stirling/software/SPDF/controller/web/AccountWebController.java'
exclude 'stirling/software/SPDF/model/ApiKeyAuthenticationToken.java'
exclude 'stirling/software/SPDF/model/Authority.java'
exclude 'stirling/software/SPDF/model/PersistentLogin.java'
exclude 'stirling/software/SPDF/model/User.java'
exclude 'stirling/software/SPDF/repository/**'
}
}
}
@@ -65,35 +48,36 @@ openApi {
outputFileName = "SwaggerDoc.json"
}
launch4j {
icon = "${projectDir}/src/main/resources/static/favicon.ico"
icon = "${projectDir}/src/main/resources/static/favicon.ico"
outfile="Stirling-PDF.exe"
headerType="console"
jarTask = tasks.bootJar
outfile="Stirling-PDF.exe"
headerType="console"
jarTask = tasks.bootJar
errTitle="Encountered error, Do you have Java 21?"
downloadUrl="https://download.oracle.com/java/21/latest/jdk-21_windows-x64_bin.exe"
variables=["BROWSER_OPEN=true", "ENDPOINTS_GROUPS_TO_REMOVE=CLI"]
jreMinVersion="17"
errTitle="Encountered error, Do you have Java 17?"
downloadUrl="https://download.oracle.com/java/17/latest/jdk-17_windows-x64_bin.exe"
variables=["BROWSER_OPEN=true", "ENDPOINTS_GROUPS_TO_REMOVE=CLI"]
jreMinVersion="17"
mutexName="Stirling-PDF"
windowTitle="Stirling-PDF"
mutexName="Stirling-PDF"
windowTitle="Stirling-PDF"
messagesStartupError="An error occurred while starting Stirling-PDF"
// messagesJreNotFoundError="This application requires a Java Runtime Environment, Please download Java 17."
messagesJreVersionError="You are running the wrong version of Java, Please download Java 21."
messagesLauncherError="Java is corrupted. Please uninstall and then install Java 21."
messagesInstanceAlreadyExists="Stirling-PDF is already running."
messagesStartupError="An error occurred while starting Stirling-PDF"
//messagesJreNotFoundError="This application requires a Java Runtime Environment, Please download Java 17."
messagesJreVersionError="You are running the wrong version of Java, Please download Java 17."
messagesLauncherError="Java is corrupted. Please uninstall and then install Java 17."
messagesInstanceAlreadyExists="Stirling-PDF is already running."
}
spotless {
java {
target project.fileTree('src/main/java')
googleJavaFormat("1.22.0").aosp().reorderImports(false)
googleJavaFormat('1.19.1').aosp().reorderImports(false)
importOrder("java", "javax", "org", "com", "net", "io")
importOrder('java', 'javax', 'org', 'com', 'net', 'io')
toggleOffOn()
trimTrailingWhitespace()
indentWithSpaces()
@@ -101,143 +85,123 @@ spotless {
}
}
//gradleLint {
// rules=['unused-dependency']
// }
tasks.wrapper {
gradleVersion = "8.7"
}
//tasks.withType(JavaCompile) {
// options.compilerArgs << "-Xlint:deprecation"
//}
configurations.all {
exclude group: "org.springframework.boot", module: "spring-boot-starter-tomcat"
}
dependencies {
//security updates
implementation "org.springframework:spring-webmvc:6.1.9"
implementation 'ch.qos.logback:logback-classic:1.4.14'
implementation 'ch.qos.logback:logback-core:1.4.14'
implementation 'org.springframework:spring-webmvc:6.1.2'
implementation("io.github.pixee:java-security-toolkit:1.2.0")
// implementation "org.yaml:snakeyaml:2.2"
implementation 'com.github.Carleslc.Simple-YAML:Simple-Yaml:1.8.4'
// Exclude Tomcat and include Jetty
implementation("org.springframework.boot:spring-boot-starter-web:$springBootVersion")
implementation "org.springframework.boot:spring-boot-starter-jetty:$springBootVersion"
implementation "org.springframework.boot:spring-boot-starter-thymeleaf:$springBootVersion"
if (System.getenv("DOCKER_ENABLE_SECURITY") != "false") {
implementation "org.springframework.boot:spring-boot-starter-security:$springBootVersion"
runtimeOnly "org.thymeleaf.extras:thymeleaf-extras-springsecurity5:3.1.2.RELEASE"
implementation "org.springframework.boot:spring-boot-starter-data-jpa:$springBootVersion"
implementation "org.springframework.boot:spring-boot-starter-oauth2-client:$springBootVersion"
implementation("io.github.pixee:java-security-toolkit:1.1.2")
implementation 'org.yaml:snakeyaml:2.2'
implementation 'org.springframework.boot:spring-boot-starter-web:3.2.2'
implementation 'org.springframework.boot:spring-boot-starter-thymeleaf:3.2.2'
if (System.getenv('DOCKER_ENABLE_SECURITY') != 'false') {
implementation 'org.springframework.boot:spring-boot-starter-security:3.2.2'
implementation 'org.thymeleaf.extras:thymeleaf-extras-springsecurity5:3.1.2.RELEASE'
implementation "org.springframework.boot:spring-boot-starter-data-jpa:3.2.2"
//2.2.x requires rebuild of DB file.. need migration path
runtimeOnly "com.h2database:h2:2.1.214"
// implementation "com.h2database:h2:2.2.224"
implementation "com.h2database:h2:2.1.214"
}
testImplementation "org.springframework.boot:spring-boot-starter-test:$springBootVersion"
testImplementation 'org.springframework.boot:spring-boot-starter-test:3.2.2'
// Batik
implementation "org.apache.xmlgraphics:batik-all:1.17"
implementation 'org.apache.xmlgraphics:batik-all:1.17'
// TwelveMonkeys
runtimeOnly "com.twelvemonkeys.imageio:imageio-batik:$imageioVersion"
runtimeOnly "com.twelvemonkeys.imageio:imageio-bmp:$imageioVersion"
// runtimeOnly "com.twelvemonkeys.imageio:imageio-hdr:$imageioVersion"
// runtimeOnly "com.twelvemonkeys.imageio:imageio-icns:$imageioVersion"
// runtimeOnly "com.twelvemonkeys.imageio:imageio-iff:$imageioVersion"
runtimeOnly "com.twelvemonkeys.imageio:imageio-jpeg:$imageioVersion"
// runtimeOnly "com.twelvemonkeys.imageio:imageio-pcx:$imageioVersion@
// runtimeOnly "com.twelvemonkeys.imageio:imageio-pict:$imageioVersion"
// runtimeOnly "com.twelvemonkeys.imageio:imageio-pnm:$imageioVersion"
// runtimeOnly "com.twelvemonkeys.imageio:imageio-psd:$imageioVersion"
// runtimeOnly "com.twelvemonkeys.imageio:imageio-sgi:$imageioVersion"
// runtimeOnly "com.twelvemonkeys.imageio:imageio-tga:$imageioVersion"
// runtimeOnly "com.twelvemonkeys.imageio:imageio-thumbsdb:$imageioVersion"
runtimeOnly "com.twelvemonkeys.imageio:imageio-tiff:$imageioVersion"
runtimeOnly "com.twelvemonkeys.imageio:imageio-webp:$imageioVersion"
// runtimeOnly "com.twelvemonkeys.imageio:imageio-xwd:$imageioVersion"
implementation 'com.twelvemonkeys.imageio:imageio-batik:3.10.1'
implementation 'com.twelvemonkeys.imageio:imageio-bmp:3.10.1'
// implementation 'com.twelvemonkeys.imageio:imageio-hdr:3.10.1'
// implementation 'com.twelvemonkeys.imageio:imageio-icns:3.10.1'
// implementation 'com.twelvemonkeys.imageio:imageio-iff:3.10.1'
implementation 'com.twelvemonkeys.imageio:imageio-jpeg:3.10.1'
// implementation 'com.twelvemonkeys.imageio:imageio-pcx:3.10.1'
// implementation 'com.twelvemonkeys.imageio:imageio-pict:3.10.1'
// implementation 'com.twelvemonkeys.imageio:imageio-pnm:3.10.1'
// implementation 'com.twelvemonkeys.imageio:imageio-psd:3.10.1'
// implementation 'com.twelvemonkeys.imageio:imageio-sgi:3.10.1'
// implementation 'com.twelvemonkeys.imageio:imageio-tga:3.10.1'
// implementation 'com.twelvemonkeys.imageio:imageio-thumbsdb:3.10.1'
implementation 'com.twelvemonkeys.imageio:imageio-tiff:3.10.1'
implementation 'com.twelvemonkeys.imageio:imageio-webp:3.10.1'
// implementation 'com.twelvemonkeys.imageio:imageio-xwd:3.10.1'
implementation 'commons-io:commons-io:2.15.1'
implementation 'org.springdoc:springdoc-openapi-starter-webmvc-ui:2.2.0'
implementation "commons-io:commons-io:2.16.1"
implementation "org.springdoc:springdoc-openapi-starter-webmvc-ui:2.2.0"
//general PDF
// https://mvnrepository.com/artifact/com.opencsv/opencsv
implementation ("com.opencsv:opencsv:5.9") {
exclude group: "commons-logging", module: "commons-logging"
implementation ('com.opencsv:opencsv:5.9') {
exclude group: 'commons-logging', module: 'commons-logging'
}
implementation ("org.apache.pdfbox:pdfbox:$pdfboxVersion") {
exclude group: "commons-logging", module: "commons-logging"
implementation ('org.apache.pdfbox:pdfbox:3.0.1'){
exclude group: 'commons-logging', module: 'commons-logging'
}
implementation ("org.apache.pdfbox:xmpbox:$pdfboxVersion") {
exclude group: "commons-logging", module: "commons-logging"
implementation ('org.apache.pdfbox:xmpbox:3.0.1'){
exclude group: 'commons-logging', module: 'commons-logging'
}
implementation 'org.apache.pdfbox:jbig2-imageio:3.0.4'
implementation "org.bouncycastle:bcprov-jdk18on:$bouncycastleVersion"
implementation "org.bouncycastle:bcpkix-jdk18on:$bouncycastleVersion"
implementation "org.springframework.boot:spring-boot-starter-actuator:$springBootVersion"
implementation "io.micrometer:micrometer-core:1.13.4"
implementation group: "com.google.zxing", name: "core", version: "3.5.3"
implementation 'org.bouncycastle:bcprov-jdk18on:1.77'
implementation 'org.bouncycastle:bcpkix-jdk18on:1.77'
implementation 'org.springframework.boot:spring-boot-starter-actuator:3.2.2'
implementation 'io.micrometer:micrometer-core:1.12.2'
implementation group: 'com.google.zxing', name: 'core', version: '3.5.2'
// https://mvnrepository.com/artifact/org.commonmark/commonmark
implementation "org.commonmark:commonmark:0.22.0"
implementation "org.commonmark:commonmark-ext-gfm-tables:0.22.0"
// https://mvnrepository.com/artifact/com.bucket4j/bucket4j_jdk17
implementation "com.bucket4j:bucket4j_jdk17-core:8.14.0"
implementation "com.fathzer:javaluator:3.0.5"
implementation 'org.commonmark:commonmark:0.21.0'
implementation 'org.commonmark:commonmark-ext-gfm-tables:0.21.0'
// https://mvnrepository.com/artifact/com.github.vladimir-bukhtoyarov/bucket4j-core
implementation 'com.github.vladimir-bukhtoyarov:bucket4j-core:7.6.0'
developmentOnly("org.springframework.boot:spring-boot-devtools:$springBootVersion")
compileOnly "org.projectlombok:lombok:$lombokVersion"
annotationProcessor "org.projectlombok:lombok:$lombokVersion"
testRuntimeOnly 'org.mockito:mockito-inline:5.2.0'
developmentOnly("org.springframework.boot:spring-boot-devtools:3.2.2")
compileOnly 'org.projectlombok:lombok:1.18.30'
annotationProcessor 'org.projectlombok:lombok:1.18.28'
}
tasks.withType(JavaCompile).configureEach {
options.encoding = "UTF-8"
dependsOn "spotlessApply"
tasks.withType(JavaCompile) {
dependsOn 'spotlessApply'
}
compileJava {
options.compilerArgs << "-parameters"
options.compilerArgs << '-parameters'
}
task writeVersion {
def propsFile = file("src/main/resources/version.properties")
def propsFile = file('src/main/resources/version.properties')
def props = new Properties()
props.setProperty("version", version)
props.setProperty('version', version)
props.store(propsFile.newWriter(), null)
}
swaggerhubUpload {
//dependsOn generateOpenApiDocs // Depends on your task generating Swagger docs
api "Stirling-PDF" // The name of your API on SwaggerHub
owner "Frooodle" // Your SwaggerHub username (or organization name)
api 'Stirling-PDF' // The name of your API on SwaggerHub
owner 'Frooodle' // Your SwaggerHub username (or organization name)
version project.version // The version of your API
inputFile "./SwaggerDoc.json" // The path to your Swagger docs
token "${System.getenv("SWAGGERHUB_API_KEY")}" // Your SwaggerHub API key, passed as an environment variable
oas "3.0.0" // The version of the OpenAPI Specification you"re using
inputFile './SwaggerDoc.json' // The path to your Swagger docs
token "${System.getenv('SWAGGERHUB_API_KEY')}" // Your SwaggerHub API key, passed as an environment variable
oas '3.0.0' // The version of the OpenAPI Specification you're using
}
jar {
enabled = false
manifest {
attributes "Implementation-Title": "Stirling-PDF",
"Implementation-Version": project.version
attributes 'Implementation-Title': 'Stirling-PDF',
'Implementation-Version': project.version
}
}
tasks.named("test") {
tasks.named('test') {
useJUnitPlatform()
}
task printVersion {
println project.version
println project.version
}
View File
@@ -1,7 +1,6 @@
apiVersion: v2
appVersion: 0.29.0
description: locally hosted web application that allows you to perform various operations
on PDF files
appVersion: 0.14.2
description: locally hosted web application that allows you to perform various operations on PDF files
home: https://github.com/Stirling-Tools/Stirling-PDF
keywords:
- stirling-pdf
View File
@@ -62,10 +62,8 @@ spec:
imagePullPolicy: {{ .Values.image.pullPolicy }}
securityContext:
{{- toYaml .Values.containerSecurityContext | nindent 10 }}
env:
- name: SYSTEM_ROOTURIPATH
value: {{ .Values.rootPath}}
{{- if .Values.envs }}
env:
{{ toYaml .Values.envs | indent 8 }}
{{- end }}
{{- if .Values.extraArgs }}
@@ -77,13 +75,13 @@ spec:
containerPort: 8080
livenessProbe:
httpGet:
path: {{ .Values.rootPath}}
path: /
port: http
{{ toYaml .Values.probes.livenessHttpGetConfig | indent 12 }}
{{ toYaml .Values.probes.liveness | indent 10 }}
readinessProbe:
httpGet:
path: {{ .Values.rootPath}}
path: /
port: http
{{ toYaml .Values.probes.readinessHttpGetConfig | indent 12 }}
{{ toYaml .Values.probes.readiness | indent 10 }}
View File
@@ -43,6 +43,6 @@ spec:
name: http
{{- end }}
protocol: TCP
selector:
{{- include "stirlingpdf.selectorLabels" . | nindent 4 }}
View File
@@ -15,18 +15,17 @@ secret:
commonLabels: {}
# team_name: dev
# rootpath for the application
rootPath: /
envs: []
# - name: UI_APP_NAME
# - name: PP_HOME_NAME
# value: "Stirling PDF"
# - name: UI_HOME_DESCRIPTION
# - name: APP_HOME_DESCRIPTION
# value: "Your locally hosted one-stop-shop for all your PDF needs."
# - name: UI_APP_NAVBAR_NAME
# - name: APP_NAVBAR_NAME
# value: "Stirling PDF"
# - name: ALLOW_GOOGLE_VISIBILITY
# value: "true"
# - name: APP_ROOT_PATH
# value: "/"
# - name: APP_LOCALE
# value: "en_GB"
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
View File
@@ -1,158 +0,0 @@
{\rtf1\ansi\ansicpg1252\uc0\stshfdbch0\stshfloch0\stshfhich0\stshfbi0\deff0\adeff0{\fonttbl{\f0\froman\fcharset0\fprq2{\*\panose 02020603050405020304}Times New Roman;}{\f1\froman\fcharset2\fprq2{\*\panose 05050102010706020507}Symbol;}{\f2\fswiss\fcharset0\fprq2{\*\panose 020b0604020202020204}Arial;}}{\colortbl;\red0\green0\blue0;\red67\green67\blue67;
\red102\green102\blue102;}{\stylesheet{\s0\snext0\sqformat\spriority0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1 Normal;}{\s1\sbasedon0\snext0\styrsid15694742
\sqformat\spriority0\keep\keepn\fi0\sb400\sa120\aspalpha\aspnum\adjustright\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl240\slmult1\rtlch\ab0\ai0\af2\afs40\ltrch\b0\i0\fs40\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1 heading 1;}{\s2\sbasedon0\snext0\styrsid15694742
\sqformat\spriority0\keep\keepn\fi0\sb360\sa120\aspalpha\aspnum\adjustright\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl240\slmult1\rtlch\ab0\ai0\af2\afs32\ltrch\b0\i0\fs32\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1 heading 2;}{\s3\sbasedon0\snext0\styrsid15694742
\sqformat\spriority0\keep\keepn\fi0\sb320\sa80\aspalpha\aspnum\adjustright\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl240\slmult1\rtlch\ab0\ai0\af2\afs28\ltrch\b0\i0\fs28\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf2 heading 3;}{\s4\sbasedon0\snext0\styrsid15694742
\sqformat\spriority0\keep\keepn\fi0\sb280\sa80\aspalpha\aspnum\adjustright\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl240\slmult1\rtlch\ab0\ai0\af2\afs24\ltrch\b0\i0\fs24\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf3 heading 4;}{\s5\sbasedon0\snext0\styrsid15694742
\sqformat\spriority0\keep\keepn\fi0\sb240\sa80\aspalpha\aspnum\adjustright\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl240\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf3 heading 5;}{\s6\sbasedon0\snext0\styrsid15694742
\sqformat\spriority0\keep\keepn\fi0\sb240\sa80\aspalpha\aspnum\adjustright\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl240\slmult1\rtlch\ab0\ai\af2\afs22\ltrch\b0\i\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf3 heading 6;}{\*\cs10\additive\ssemihidden\spriority0 Default Paragraph Font;
}{\*\ts11\tsrowd\snext11\ssemihidden\spriority0\aspalpha\aspnum\adjustright\ltrpar\li0\lin0\ri0\rin0\ql\faauto\tsvertalt\tsbrdrl\tsbrdrr\tsbrdrt\tsbrdrb\tsbrdrdgr\tsbrdrdgl\tsbrdrh\tsbrdrv\trpaddl108\trpaddfl3\trwWidthB0\trftsWidthB3\trpaddt0\trpaddft3\trpaddb0
\trpaddfb3\trpaddr108\trpaddfr3 Normal Table;}{\s15\sbasedon0\snext15\styrsid15694742\sqformat\spriority0\keep\keepn\fi0\sb0\sa60\aspalpha\aspnum\adjustright\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl240\slmult1\rtlch\ab0\ai0\af2\afs52\ltrch\b0\i0\fs52\loch\af2
\dbch\af2\hich\f2\strike0\ulnone\cf1 Title;}{\s16\sbasedon0\snext16\styrsid15694742\sqformat\spriority0\keep\keepn\fi0\sb0\sa320\aspalpha\aspnum\adjustright\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl240\slmult1\rtlch\ab0\ai0\af2\afs30\ltrch\b0\i0\fs30
\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf3 Subtitle;}}{\*\rsidtbl\rsid10976062\rsid13249109}{\*\generator Aspose.Words for Java 23.4.0;}{\info\version1\edmins0\nofpages1\nofwords0\nofchars0\nofcharsws0}\paperw12240\paperh15840\margl1440\margr1440\margt1440\margb1440\gutter0{
\mmathPr\mbrkBin0\mbrkBinSub0\mdefJc1\mdispDef1\minterSp0\mintLim0\mintraSp0\mlMargin0\mmathFont0\mnaryLim1\mpostSp0\mpreSp0\mrMargin0\msmallFrac0\mwrapIndent1440\mwrapRight0}\deflang1033\deflangfe2052\adeflang1025\jexpand\showxmlerrors1\validatexml1{
\*\wgrffmtfilter 013f}\viewkind1\viewscale100\fet0\ftnbj\aenddoc\ftnrstcont\aftnrstcont\ftnnar\aftnnrlc\widowctrl\nospaceforul\nolnhtadjtbl\alntblind\lyttblrtgr\dntblnsbdb\noxlattoyen\wrppunct\nobrkwrptbl\expshrtn\snaptogridincell\asianbrkrule\htmautsp\noultrlspc
\useltbaln\splytwnine\ftnlytwnine\lytcalctblwd\allowfieldendsel\lnbrkrule\nouicompat\nofeaturethrottle1\utinl\formshade\nojkernpunct\dghspace180\dgvspace180\dghorigin1800\dgvorigin1440\dghshow1\dgvshow1\dgmargin\pgbrdrhead\pgbrdrfoot\rsidroot10976062\sectd\sectlinegrid360\pgwsxn12240\pghsxn15840\marglsxn1440\margrsxn1440\margtsxn1440\margbsxn1440\guttersxn0\headery720\footery720\colsx720\ltrsect\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar
\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\alang1025\afs22\ltrch\b0\i0\fs22\lang1033\langnp1033\langfe1033\langfenp1033\loch\af2\dbch\af2
\hich\f2\strike0\ulnone\cf1 A}{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar
\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0
\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2
\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22
\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw
\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}
\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22
\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22
\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb
\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0
\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0
\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0
\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0
\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2
\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22
\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw
\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}
\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22
\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22
\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb
\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0
\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0
\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0
\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0
\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2
\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22
\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw
\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}
\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22
\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22
\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb
\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0
\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0
\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0
\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0
\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2
\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22
\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw
\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}
\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22
\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22
\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb
\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0
\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0
\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0
\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0
\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2
\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22
\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw
\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}
\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22
\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22
\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb
\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0
\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0
\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0
\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0
\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2
\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22
\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw
\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\alang1025\afs22\ltrch\b0\i0\fs22\lang1033\langnp1033\langfe1033\langfenp1033\loch\af2
\dbch\af2\hich\f2\strike0\ulnone\cf1 B}{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar
\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard
\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22
\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22
\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb
\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0
\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0
\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0
\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0
\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2
\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22
\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw
\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}
\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22
\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22
\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb
\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0
\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0
\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0
\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0
\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2
\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22
\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw
\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}
\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22
\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22
\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb
\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0
\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0
\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0
\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0
\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2
\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22
\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw
\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}
\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22
\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22
\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb
\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0
\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0
\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0
\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0
\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2
\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22
\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw
\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}
\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22
\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22
\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb
\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0
\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0
\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0
\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0
\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2
\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22
\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw
\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}
\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22
\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22
\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}\pard\plain\itap0\s0\ilvl0\fi0\sb0\sa0\aspalpha\aspnum\adjustright\brdrt\brdrl\brdrb
\brdrr\brdrbtw\brdrbar\widctlpar\ltrpar\li0\lin0\ri0\rin0\ql\faauto\sl276\slmult1\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1{\rtlch\ab0\ai0\af2\alang1025\afs22\ltrch\b0\i0\fs22\lang1033\langnp1033\langfe1033\langfenp1033
\loch\af2\dbch\af2\hich\f2\strike0\ulnone\cf1 C}{\rtlch\ab0\ai0\af2\afs22\ltrch\b0\i0\fs22\loch\af2\dbch\af2\hich\f2\insrsid10976062\strike0\ulnone\cf1\par}{
\*\latentstyles\lsdstimax267\lsdlockeddef0\lsdsemihiddendef0\lsdunhideuseddef0\lsdqformatdef0\lsdprioritydef0{\lsdlockedexcept\lsdqformat1 Normal;\lsdqformat1 heading 1;\lsdsemihidden1\lsdunhideused1\lsdqformat1 heading 2;\lsdsemihidden1\lsdunhideused1\lsdqformat1 heading 3;
\lsdsemihidden1\lsdunhideused1\lsdqformat1 heading 4;\lsdsemihidden1\lsdunhideused1\lsdqformat1 heading 5;\lsdsemihidden1\lsdunhideused1\lsdqformat1 heading 6;\lsdsemihidden1\lsdunhideused1\lsdqformat1 heading 7;\lsdsemihidden1\lsdunhideused1\lsdqformat1 heading 8;
\lsdsemihidden1\lsdunhideused1\lsdqformat1 heading 9;\lsdsemihidden1\lsdunhideused1\lsdqformat1 caption;\lsdqformat1 Title;\lsdqformat1 Subtitle;\lsdqformat1 Strong;\lsdqformat1 Emphasis;\lsdsemihidden1\lsdpriority99 Placeholder Text;\lsdqformat1\lsdpriority1 No Spacing;
\lsdpriority60 Light Shading;\lsdpriority61 Light List;\lsdpriority62 Light Grid;\lsdpriority63 Medium Shading 1;\lsdpriority64 Medium Shading 2;\lsdpriority65 Medium List 1;\lsdpriority66 Medium List 2;\lsdpriority67 Medium Grid 1;\lsdpriority68 Medium Grid 2;
\lsdpriority69 Medium Grid 3;\lsdpriority70 Dark List;\lsdpriority71 Colorful Shading;\lsdpriority72 Colorful List;\lsdpriority73 Colorful Grid;\lsdpriority60 Light Shading Accent 1;\lsdpriority61 Light List Accent 1;\lsdpriority62 Light Grid Accent 1;\lsdpriority63 Medium Shading 1 Accent 1;
\lsdpriority64 Medium Shading 2 Accent 1;\lsdpriority65 Medium List 1 Accent 1;\lsdsemihidden1\lsdpriority99 Revision;\lsdqformat1\lsdpriority34 List Paragraph;\lsdqformat1\lsdpriority29 Quote;\lsdqformat1\lsdpriority30 Intense Quote;\lsdpriority66 Medium List 2 Accent 1;
\lsdpriority67 Medium Grid 1 Accent 1;\lsdpriority68 Medium Grid 2 Accent 1;\lsdpriority69 Medium Grid 3 Accent 1;\lsdpriority70 Dark List Accent 1;\lsdpriority71 Colorful Shading Accent 1;\lsdpriority72 Colorful List Accent 1;\lsdpriority73 Colorful Grid Accent 1;
\lsdpriority60 Light Shading Accent 2;\lsdpriority61 Light List Accent 2;\lsdpriority62 Light Grid Accent 2;\lsdpriority63 Medium Shading 1 Accent 2;\lsdpriority64 Medium Shading 2 Accent 2;\lsdpriority65 Medium List 1 Accent 2;\lsdpriority66 Medium List 2 Accent 2;
\lsdpriority67 Medium Grid 1 Accent 2;\lsdpriority68 Medium Grid 2 Accent 2;\lsdpriority69 Medium Grid 3 Accent 2;\lsdpriority70 Dark List Accent 2;\lsdpriority71 Colorful Shading Accent 2;\lsdpriority72 Colorful List Accent 2;\lsdpriority73 Colorful Grid Accent 2;
\lsdpriority60 Light Shading Accent 3;\lsdpriority61 Light List Accent 3;\lsdpriority62 Light Grid Accent 3;\lsdpriority63 Medium Shading 1 Accent 3;\lsdpriority64 Medium Shading 2 Accent 3;\lsdpriority65 Medium List 1 Accent 3;\lsdpriority66 Medium List 2 Accent 3;
\lsdpriority67 Medium Grid 1 Accent 3;\lsdpriority68 Medium Grid 2 Accent 3;\lsdpriority69 Medium Grid 3 Accent 3;\lsdpriority70 Dark List Accent 3;\lsdpriority71 Colorful Shading Accent 3;\lsdpriority72 Colorful List Accent 3;\lsdpriority73 Colorful Grid Accent 3;
\lsdpriority60 Light Shading Accent 4;\lsdpriority61 Light List Accent 4;\lsdpriority62 Light Grid Accent 4;\lsdpriority63 Medium Shading 1 Accent 4;\lsdpriority64 Medium Shading 2 Accent 4;\lsdpriority65 Medium List 1 Accent 4;\lsdpriority66 Medium List 2 Accent 4;
\lsdpriority67 Medium Grid 1 Accent 4;\lsdpriority68 Medium Grid 2 Accent 4;\lsdpriority69 Medium Grid 3 Accent 4;\lsdpriority70 Dark List Accent 4;\lsdpriority71 Colorful Shading Accent 4;\lsdpriority72 Colorful List Accent 4;\lsdpriority73 Colorful Grid Accent 4;
\lsdpriority60 Light Shading Accent 5;\lsdpriority61 Light List Accent 5;\lsdpriority62 Light Grid Accent 5;\lsdpriority63 Medium Shading 1 Accent 5;\lsdpriority64 Medium Shading 2 Accent 5;\lsdpriority65 Medium List 1 Accent 5;\lsdpriority66 Medium List 2 Accent 5;
\lsdpriority67 Medium Grid 1 Accent 5;\lsdpriority68 Medium Grid 2 Accent 5;\lsdpriority69 Medium Grid 3 Accent 5;\lsdpriority70 Dark List Accent 5;\lsdpriority71 Colorful Shading Accent 5;\lsdpriority72 Colorful List Accent 5;\lsdpriority73 Colorful Grid Accent 5;
\lsdpriority60 Light Shading Accent 6;\lsdpriority61 Light List Accent 6;\lsdpriority62 Light Grid Accent 6;\lsdpriority63 Medium Shading 1 Accent 6;\lsdpriority64 Medium Shading 2 Accent 6;\lsdpriority65 Medium List 1 Accent 6;\lsdpriority66 Medium List 2 Accent 6;
\lsdpriority67 Medium Grid 1 Accent 6;\lsdpriority68 Medium Grid 2 Accent 6;\lsdpriority69 Medium Grid 3 Accent 6;\lsdpriority70 Dark List Accent 6;\lsdpriority71 Colorful Shading Accent 6;\lsdpriority72 Colorful List Accent 6;\lsdpriority73 Colorful Grid Accent 6;
\lsdqformat1\lsdpriority19 Subtle Emphasis;\lsdqformat1\lsdpriority21 Intense Emphasis;\lsdqformat1\lsdpriority31 Subtle Reference;\lsdqformat1\lsdpriority32 Intense Reference;\lsdqformat1\lsdpriority33 Book Title;\lsdsemihidden1\lsdunhideused1\lsdpriority37 Bibliography;
\lsdsemihidden1\lsdunhideused1\lsdqformat1\lsdpriority39 TOC Heading;}}}

View File

@@ -1,106 +0,0 @@
%PDF-1.3
%“Œ‹ž ReportLab Generated PDF document http://www.reportlab.com
1 0 obj
<<
/F1 2 0 R
>>
endobj
2 0 obj
<<
/BaseFont /Helvetica /Encoding /WinAnsiEncoding /Name /F1 /Subtype /Type1 /Type /Font
>>
endobj
3 0 obj
<<
/Contents 9 0 R /MediaBox [ 0 0 612 792 ] /Parent 8 0 R /Resources <<
/Font 1 0 R /ProcSet [ /PDF /Text /ImageB /ImageC /ImageI ]
>> /Rotate 0 /Trans <<
>>
/Type /Page
>>
endobj
4 0 obj
<<
/Contents 10 0 R /MediaBox [ 0 0 612 792 ] /Parent 8 0 R /Resources <<
/Font 1 0 R /ProcSet [ /PDF /Text /ImageB /ImageC /ImageI ]
>> /Rotate 0 /Trans <<
>>
/Type /Page
>>
endobj
5 0 obj
<<
/Contents 11 0 R /MediaBox [ 0 0 612 792 ] /Parent 8 0 R /Resources <<
/Font 1 0 R /ProcSet [ /PDF /Text /ImageB /ImageC /ImageI ]
>> /Rotate 0 /Trans <<
>>
/Type /Page
>>
endobj
6 0 obj
<<
/PageMode /UseNone /Pages 8 0 R /Type /Catalog
>>
endobj
7 0 obj
<<
/Author (anonymous) /CreationDate (D:20240718233034+00'00') /Creator (ReportLab PDF Library - www.reportlab.com) /Keywords () /ModDate (D:20240718233034+00'00') /Producer (ReportLab PDF Library - www.reportlab.com)
/Subject (unspecified) /Title (untitled) /Trapped /False
>>
endobj
8 0 obj
<<
/Count 3 /Kids [ 3 0 R 4 0 R 5 0 R ] /Type /Pages
>>
endobj
9 0 obj
<<
/Filter [ /ASCII85Decode /FlateDecode ] /Length 210
>>
stream
Gap@Gb79+X'F"5[`EfJOD4:mD<%*=m+N>oDG,>NK`<U'B^0WYY,dWl^i_UcRk`<"L=<NPC$BtQ<5l$3<Y!?BuoCSYQ6GSt25lpqr0IrP?S[b)9%M"e'HHFqcRO'9eRaR0'DYi*Y.:nEMFAoTM;rPL%EF]`CfoELVl_Q,"LS:%iI;Nc[&bG.*65O]ecfK1'*<>5P_s[usI/ph*0pV~>endstream
endobj
10 0 obj
<<
/Filter [ /ASCII85Decode /FlateDecode ] /Length 209
>>
stream
Gap@Gb79+X'F"5Y`EfJOV2A9=!fB]F'tK1LS`,]G+MiTenb&V2-^hqa(5IE#Nr59/!"Qm*5_(BdF!0&h!Yhk/A+\iS'%6tuO$O)9LaZS+flr([1p2&#RS1p/gT[B;rDj-=&=iqUlj(P^/5U@eCFqn4:<lU`l`.HXqG-',hJH.DI.(6L\luSAW`Q'oje[qgVLVIXg%PXe+,<$7('~>endstream
endobj
11 0 obj
<<
/Filter [ /ASCII85Decode /FlateDecode ] /Length 209
>>
stream
Gap@GbmK%f(e+0_`ODoa2.):e/i+N3r(.o*Qf\gSNb(bt4FIubi@GIOE=p8Ir3;CbQ@KuG^cdJhODZKQ*upt+*rdZ%!mFmN$*.P)K;`s#]G=8AO3s3DGB.RCOn?[F]bEIg,a>25?B%dh\Z/C6opFE'el@I,P\u\V\]:*JYrrsNJ&d,11VL;$h!43eGu&1X6$+5-h\Vr6!+>4Je,~>endstream
endobj
xref
0 12
0000000000 65535 f
0000000073 00000 n
0000000104 00000 n
0000000211 00000 n
0000000404 00000 n
0000000598 00000 n
0000000792 00000 n
0000000860 00000 n
0000001156 00000 n
0000001227 00000 n
0000001527 00000 n
0000001827 00000 n
trailer
<<
/ID
[<0d5cf047e754e05f8d574f067785875c><0d5cf047e754e05f8d574f067785875c>]
% ReportLab generated PDF document -- digest (http://www.reportlab.com)
/Info 7 0 R
/Root 6 0 R
/Size 12
>>
startxref
2127
%%EOF

View File

@@ -1,106 +0,0 @@
%PDF-1.3
%“Œ‹ž ReportLab Generated PDF document http://www.reportlab.com
1 0 obj
<<
/F1 2 0 R
>>
endobj
2 0 obj
<<
/BaseFont /Helvetica /Encoding /WinAnsiEncoding /Name /F1 /Subtype /Type1 /Type /Font
>>
endobj
3 0 obj
<<
/Contents 9 0 R /MediaBox [ 0 0 612 792 ] /Parent 8 0 R /Resources <<
/Font 1 0 R /ProcSet [ /PDF /Text /ImageB /ImageC /ImageI ]
>> /Rotate 0 /Trans <<
>>
/Type /Page
>>
endobj
4 0 obj
<<
/Contents 10 0 R /MediaBox [ 0 0 612 792 ] /Parent 8 0 R /Resources <<
/Font 1 0 R /ProcSet [ /PDF /Text /ImageB /ImageC /ImageI ]
>> /Rotate 0 /Trans <<
>>
/Type /Page
>>
endobj
5 0 obj
<<
/Contents 11 0 R /MediaBox [ 0 0 612 792 ] /Parent 8 0 R /Resources <<
/Font 1 0 R /ProcSet [ /PDF /Text /ImageB /ImageC /ImageI ]
>> /Rotate 0 /Trans <<
>>
/Type /Page
>>
endobj
6 0 obj
<<
/PageMode /UseNone /Pages 8 0 R /Type /Catalog
>>
endobj
7 0 obj
<<
/Author (anonymous) /CreationDate (D:20240718233034+00'00') /Creator (ReportLab PDF Library - www.reportlab.com) /Keywords () /ModDate (D:20240718233034+00'00') /Producer (ReportLab PDF Library - www.reportlab.com)
/Subject (unspecified) /Title (untitled) /Trapped /False
>>
endobj
8 0 obj
<<
/Count 3 /Kids [ 3 0 R 4 0 R 5 0 R ] /Type /Pages
>>
endobj
9 0 obj
<<
/Filter [ /ASCII85Decode /FlateDecode ] /Length 207
>>
stream
Gap@G:CDb.*/<p2MVk["e@)7*Z0@"b%+@f/9pA%_U<oOkVp?PnGRb81iPg?0i?(]%^_CSf##%;<!7Ne/-%RR^p@t7hKYZ9eJVHV]fjjHIB:6DrW+2\p16@*`r^CpQZZH'2Pjqd<.&hM2UO%$Wi$te%4QmS;<E"QS\!deQG_XtuEK>b(UbS>%`/0S`k\\5'TNY0mmgH?`8]i_0~>endstream
endobj
10 0 obj
<<
/Filter [ /ASCII85Decode /FlateDecode ] /Length 207
>>
stream
Gap@G]afWJ'Lm;=if<;s>V*7BTJ]oQ@P!(q5S+WG1%>L@?8Ue;c>[fY&&IOd5@t@TY@+q.5T<Z'81"J("KhsBa+&u4"n'#6)AjfImh)%$0tVC:aGk",=aJJH#/4]i.WJr9c"cibYm:M-44<%FFlG0Cl\Z'nmo7C"TR+7dk3T#iD(9Pq'\;rQku%o>A_`50SO&7M04=8M'O<Am~>endstream
endobj
11 0 obj
<<
/Filter [ /ASCII85Decode /FlateDecode ] /Length 209
>>
stream
Gap@GYmu@>'Ld5[if35r/JNaJ.A.7fP9RpSN*8k^-sEER0,enq1Rsuo@R/uCO-^&Y`F'9d^a?9)?ns+F&dXm[HMgPn6Ep+%TRk5Nh+!(+[H#H:U^.^(YL,PKS'%j/:3O\hJVEK-UUekJTd[A$N^((K^#0Du`i@,/^f5KiUISGr")3/+f9NF8NO1+iUgm^b"X\cE^+[:s!0]Gu6i~>endstream
endobj
xref
0 12
0000000000 65535 f
0000000073 00000 n
0000000104 00000 n
0000000211 00000 n
0000000404 00000 n
0000000598 00000 n
0000000792 00000 n
0000000860 00000 n
0000001156 00000 n
0000001227 00000 n
0000001524 00000 n
0000001822 00000 n
trailer
<<
/ID
[<407fc55425168745e56176202aad30c9><407fc55425168745e56176202aad30c9>]
% ReportLab generated PDF document -- digest (http://www.reportlab.com)
/Info 7 0 R
/Root 6 0 R
/Size 12
>>
startxref
2122
%%EOF

View File

@@ -1,106 +0,0 @@
%PDF-1.3
%“Œ‹ž ReportLab Generated PDF document http://www.reportlab.com
1 0 obj
<<
/F1 2 0 R
>>
endobj
2 0 obj
<<
/BaseFont /Helvetica /Encoding /WinAnsiEncoding /Name /F1 /Subtype /Type1 /Type /Font
>>
endobj
3 0 obj
<<
/Contents 9 0 R /MediaBox [ 0 0 612 792 ] /Parent 8 0 R /Resources <<
/Font 1 0 R /ProcSet [ /PDF /Text /ImageB /ImageC /ImageI ]
>> /Rotate 0 /Trans <<
>>
/Type /Page
>>
endobj
4 0 obj
<<
/Contents 10 0 R /MediaBox [ 0 0 612 792 ] /Parent 8 0 R /Resources <<
/Font 1 0 R /ProcSet [ /PDF /Text /ImageB /ImageC /ImageI ]
>> /Rotate 0 /Trans <<
>>
/Type /Page
>>
endobj
5 0 obj
<<
/Contents 11 0 R /MediaBox [ 0 0 612 792 ] /Parent 8 0 R /Resources <<
/Font 1 0 R /ProcSet [ /PDF /Text /ImageB /ImageC /ImageI ]
>> /Rotate 0 /Trans <<
>>
/Type /Page
>>
endobj
6 0 obj
<<
/PageMode /UseNone /Pages 8 0 R /Type /Catalog
>>
endobj
7 0 obj
<<
/Author (anonymous) /CreationDate (D:20240718233034+00'00') /Creator (ReportLab PDF Library - www.reportlab.com) /Keywords () /ModDate (D:20240718233034+00'00') /Producer (ReportLab PDF Library - www.reportlab.com)
/Subject (unspecified) /Title (untitled) /Trapped /False
>>
endobj
8 0 obj
<<
/Count 3 /Kids [ 3 0 R 4 0 R 5 0 R ] /Type /Pages
>>
endobj
9 0 obj
<<
/Filter [ /ASCII85Decode /FlateDecode ] /Length 209
>>
stream
Gap@G]+0EH(e/_@iZH]:>:>hu1e>07BJg5<'#:.C1n)e#(QJ6R1Rsuo_gpn.+0-H5$/#"iYR[B.9\'>7!aDAC*rf/t&6O#aH<?-7IT'\?X(&TcABG=ON*Nq`4k=o&p@3,0*31r<)TAP2Pk94p0\"R-_sY1$AYo[8B\?4R>feLAB\mpjZhp"`@J3;"Fm97#9+W,"eb95\+#p\^HN~>endstream
endobj
10 0 obj
<<
/Filter [ /ASCII85Decode /FlateDecode ] /Length 209
>>
stream
Gap@G]+0EX'Eriuig+>QHNeD'#n%Sq#n%BW`C'uDUOYK)HdS4E9JMsp+HUmDj&H-t*4?UamXX0peVspk"i_@ba+&u"J>UYDKV_^G,7V==aTZZ<YO7:sNSQ[6"Ja-29NtYjd#=`J@D'h+[QW=:EEb?A<k!f+\`g^?,Vgp7_)91[lR\f.Tkf7VIPLVYM&deF!aYt9Ip^"N",3F'*W~>endstream
endobj
11 0 obj
<<
/Filter [ /ASCII85Decode /FlateDecode ] /Length 209
>>
stream
Gap@G]+0EH(e/_@iZH]:>J`g!jPCLm;?AgU"fdk"PQZD\d?lRI_oWc[$tp^]O\:3fK8kWeX2&Jcg0+RoJ]j;2j*upu!b4.o&f)b$I@7CfIYjP^#\VjhC=QhQ]^lV-@<0Tam!0.+Dn@("AK%N,Uc7hb+6VoQ$q2q[7]BB92RoY/.j2N028i1jNf'@<1+Fqf$1&"8omHk`#DHP>OT~>endstream
endobj
xref
0 12
0000000000 65535 f
0000000073 00000 n
0000000104 00000 n
0000000211 00000 n
0000000404 00000 n
0000000598 00000 n
0000000792 00000 n
0000000860 00000 n
0000001156 00000 n
0000001227 00000 n
0000001526 00000 n
0000001826 00000 n
trailer
<<
/ID
[<80da26147a484f2b7573da8151a93d2e><80da26147a484f2b7573da8151a93d2e>]
% ReportLab generated PDF document -- digest (http://www.reportlab.com)
/Info 7 0 R
/Root 6 0 R
/Size 12
>>
startxref
2126
%%EOF

Binary file not shown.

View File

@@ -1,106 +0,0 @@
%PDF-1.3
%“Œ‹ž ReportLab Generated PDF document http://www.reportlab.com
1 0 obj
<<
/F1 2 0 R
>>
endobj
2 0 obj
<<
/BaseFont /Helvetica /Encoding /WinAnsiEncoding /Name /F1 /Subtype /Type1 /Type /Font
>>
endobj
3 0 obj
<<
/Contents 9 0 R /MediaBox [ 0 0 612 792 ] /Parent 8 0 R /Resources <<
/Font 1 0 R /ProcSet [ /PDF /Text /ImageB /ImageC /ImageI ]
>> /Rotate 0 /Trans <<
>>
/Type /Page
>>
endobj
4 0 obj
<<
/Contents 10 0 R /MediaBox [ 0 0 612 792 ] /Parent 8 0 R /Resources <<
/Font 1 0 R /ProcSet [ /PDF /Text /ImageB /ImageC /ImageI ]
>> /Rotate 0 /Trans <<
>>
/Type /Page
>>
endobj
5 0 obj
<<
/Contents 11 0 R /MediaBox [ 0 0 612 792 ] /Parent 8 0 R /Resources <<
/Font 1 0 R /ProcSet [ /PDF /Text /ImageB /ImageC /ImageI ]
>> /Rotate 0 /Trans <<
>>
/Type /Page
>>
endobj
6 0 obj
<<
/PageMode /UseNone /Pages 8 0 R /Type /Catalog
>>
endobj
7 0 obj
<<
/Author (anonymous) /CreationDate (D:20240718233034+00'00') /Creator (ReportLab PDF Library - www.reportlab.com) /Keywords () /ModDate (D:20240718233034+00'00') /Producer (ReportLab PDF Library - www.reportlab.com)
/Subject (unspecified) /Title (untitled) /Trapped /False
>>
endobj
8 0 obj
<<
/Count 3 /Kids [ 3 0 R 4 0 R 5 0 R ] /Type /Pages
>>
endobj
9 0 obj
<<
/Filter [ /ASCII85Decode /FlateDecode ] /Length 206
>>
stream
Gap@G\IO3f&4Lr[@S4&T2aReWZ3N'9",Ncra>5AuK^J(o@r?=EP>b]h[L@XZ8q7#[c:#H2:^/=b,p3^,&f-Q.'H%!U?%N\iVa1pLMlh/41\A8@dF5@0al:-1?L;D%LpL3g\9`.3c6N/Mp=sE/nO%^@%Cc3`]e`qqS@[pkUWemMZC<P\fkqa55u)*hIUoU437-gb!e_*&B/,&~>endstream
endobj
10 0 obj
<<
/Filter [ /ASCII85Decode /FlateDecode ] /Length 209
>>
stream
Gap@G\IO3V'LdA_ig"8P1PS=kA5Q_GQ\P]*S3\>Q`jHYt?8UdkV`6]UV*On)+1VMV+A@.iF:*6sWfM9f"s.NmVuMto!p7-+,Rb<.h,pdi-&OQ5KO\RRFj.j"A)ScTQ7$hudF^TnZ'XuQA5"O]rYkt><-DJmj'"Ri>n!4`^m409XX`e)AR'*rGsn6m79.18+^ba=qRuss"-A3k+9~>endstream
endobj
11 0 obj
<<
/Filter [ /ASCII85Decode /FlateDecode ] /Length 210
>>
stream
Gap@G]+0EH(e/_@iZH]:.1fBHK`Xl'[i1&AjX(\k8hbgo(QJ6R1Rsuo6_I1A5Gg$JL;D#$J2CX;+Cf*cUHk2%H1XmpWe+qZ5moJ#B]>b%%[d,mfSSkS4A:Q4NlOFfrL7eA,s45"eUSakM;927AA,1"-LZ)&nZ/ah=8_X7:?ZMj@J@;r7d`t]Z0\d39M%:$k8[S5D"2oSap4s80l?~>endstream
endobj
xref
0 12
0000000000 65535 f
0000000073 00000 n
0000000104 00000 n
0000000211 00000 n
0000000404 00000 n
0000000598 00000 n
0000000792 00000 n
0000000860 00000 n
0000001156 00000 n
0000001227 00000 n
0000001523 00000 n
0000001823 00000 n
trailer
<<
/ID
[<88edee24ee67bd7d6b7cf53cfa2222b0><88edee24ee67bd7d6b7cf53cfa2222b0>]
% ReportLab generated PDF document -- digest (http://www.reportlab.com)
/Info 7 0 R
/Root 6 0 R
/Size 12
>>
startxref
2124
%%EOF

View File

@@ -1,106 +0,0 @@
%PDF-1.3
%“Œ‹ž ReportLab Generated PDF document http://www.reportlab.com
1 0 obj
<<
/F1 2 0 R
>>
endobj
2 0 obj
<<
/BaseFont /Helvetica /Encoding /WinAnsiEncoding /Name /F1 /Subtype /Type1 /Type /Font
>>
endobj
3 0 obj
<<
/Contents 9 0 R /MediaBox [ 0 0 612 792 ] /Parent 8 0 R /Resources <<
/Font 1 0 R /ProcSet [ /PDF /Text /ImageB /ImageC /ImageI ]
>> /Rotate 0 /Trans <<
>>
/Type /Page
>>
endobj
4 0 obj
<<
/Contents 10 0 R /MediaBox [ 0 0 612 792 ] /Parent 8 0 R /Resources <<
/Font 1 0 R /ProcSet [ /PDF /Text /ImageB /ImageC /ImageI ]
>> /Rotate 0 /Trans <<
>>
/Type /Page
>>
endobj
5 0 obj
<<
/Contents 11 0 R /MediaBox [ 0 0 612 792 ] /Parent 8 0 R /Resources <<
/Font 1 0 R /ProcSet [ /PDF /Text /ImageB /ImageC /ImageI ]
>> /Rotate 0 /Trans <<
>>
/Type /Page
>>
endobj
6 0 obj
<<
/PageMode /UseNone /Pages 8 0 R /Type /Catalog
>>
endobj
7 0 obj
<<
/Author (anonymous) /CreationDate (D:20240718233034+00'00') /Creator (ReportLab PDF Library - www.reportlab.com) /Keywords () /ModDate (D:20240718233034+00'00') /Producer (ReportLab PDF Library - www.reportlab.com)
/Subject (unspecified) /Title (untitled) /Trapped /False
>>
endobj
8 0 obj
<<
/Count 3 /Kids [ 3 0 R 4 0 R 5 0 R ] /Type /Pages
>>
endobj
9 0 obj
<<
/Filter [ /ASCII85Decode /FlateDecode ] /Length 209
>>
stream
Gap@GYmu@>'Ld5[if35rI0]sG)F[U^"c>T)"\\os-r:1V0,enq1Rsuo,*67.@k7U.LRF-P.e"CM2V!>iYi<g`nXh!K?n@$t^rY1$+^0'>=B8H6e;F1WmG#,(eS00(Qe9&:O@nI879DTsT,njXAB?`8:>,Hn3*RV!qh4;&@6%]<9Y*>QZ].Z5o;RAZXg7d[#+bphHs_Ep!QR2TZ2~>endstream
endobj
10 0 obj
<<
/Filter [ /ASCII85Decode /FlateDecode ] /Length 210
>>
stream
Gap@G]+0EH(e/_@iZH]:>=,iY1bE)XN?M;1'J/>i&HY;gks]*rj:!DKpb8@`prC#N+9E#o#-<G*!#p7e6j-1sX2k5S,6XmM"taYkfK^k">%usEeEk=sR<UT"dm`rXD;!S`_jS9LU+(R%e'V%WSMfHP.pXZEQqTQq=&D[I[PS(41(NIAZ1R/U?:Z=hSXu!NDF)bpG2F+/I/q/u1-Y~>endstream
endobj
11 0 obj
<<
/Filter [ /ASCII85Decode /FlateDecode ] /Length 209
>>
stream
Gap@G_$YcZ'LhbF`EQB$nqi=8S<;#HbK3&f>rnodRPo`Vf4P[3cJidY(I=[K5NWCT'<lHgci?oCRVNST&[k#q4oSC0FWgAt1pD4d_(hIRjn_Nt+cFgJlfm[1U8@/M4r^Pk<@F!@e?%/!-Vq;]nfdLi9]P2M)ck9?)%oNXa_\N<-d"(pjlH%-G`T@Sj&P(j6.@#Xh\Vr6!1iI2/H~>endstream
endobj
xref
0 12
0000000000 65535 f
0000000073 00000 n
0000000104 00000 n
0000000211 00000 n
0000000404 00000 n
0000000598 00000 n
0000000792 00000 n
0000000860 00000 n
0000001156 00000 n
0000001227 00000 n
0000001526 00000 n
0000001827 00000 n
trailer
<<
/ID
[<4fcc82a085fe71e34a32d1b23c8b939f><4fcc82a085fe71e34a32d1b23c8b939f>]
% ReportLab generated PDF document -- digest (http://www.reportlab.com)
/Info 7 0 R
/Root 6 0 R
/Size 12
>>
startxref
2127
%%EOF

View File

@@ -1,21 +0,0 @@
import os


def before_all(context):
    # Reset shared request state before the test run starts.
    context.endpoint = None
    context.request_data = None
    context.files = {}
    context.response = None


def after_scenario(context, scenario):
    # Close any file handles opened during the scenario.
    if hasattr(context, 'files'):
        for file in context.files.values():
            file.close()
    # Remove the downloaded response file and any generated input file.
    if os.path.exists('response_file'):
        os.remove('response_file')
    if hasattr(context, 'file_name') and os.path.exists(context.file_name):
        os.remove(context.file_name)
    # Remove any temporary files
    for temp_file in os.listdir('.'):
        if temp_file.startswith('genericNonCustomisableName') or temp_file.startswith('temp_image_'):
            os.remove(temp_file)

View File

@@ -1,130 +0,0 @@
@example @general
Feature: API Validation

  @positive @password
  Scenario: Remove password
    Given I generate a PDF file as "fileInput"
    And the pdf contains 3 pages
    And the pdf is encrypted with password "password123"
    And the request data includes
      | parameter | value |
      | password | password123 |
    When I send the API request to the endpoint "/api/v1/security/remove-password"
    Then the response content type should be "application/pdf"
    And the response file should have size greater than 0
    And the response PDF is not passworded
    And the response status code should be 200

  @negative @password
  Scenario: Remove password wrong password
    Given I generate a PDF file as "fileInput"
    And the pdf contains 3 pages
    And the pdf is encrypted with password "password123"
    And the request data includes
      | parameter | value |
      | password | wrongPassword |
    When I send the API request to the endpoint "/api/v1/security/remove-password"
    Then the response status code should be 500
    And the response should contain error message "Internal Server Error"

  @positive @info
  Scenario: Get info
    Given I generate a PDF file as "fileInput"
    When I send the API request to the endpoint "/api/v1/security/get-info-on-pdf"
    Then the response content type should be "application/json"
    And the response file should have size greater than 100
    And the response status code should be 200

  @positive @password
  Scenario: Add password
    Given I generate a PDF file as "fileInput"
    And the pdf contains 3 pages
    And the request data includes
      | parameter | value |
      | password | password123 |
    When I send the API request to the endpoint "/api/v1/security/add-password"
    Then the response content type should be "application/pdf"
    And the response file should have size greater than 100
    And the response PDF is passworded
    And the response status code should be 200

  @positive @password
  Scenario: Add password with other params
    Given I generate a PDF file as "fileInput"
    And the pdf contains 3 pages
    And the request data includes
      | parameter | value |
      | ownerPassword | ownerPass |
      | password | password123 |
      | keyLength | 256 |
      | canPrint | true |
      | canModify | false |
    When I send the API request to the endpoint "/api/v1/security/add-password"
    Then the response content type should be "application/pdf"
    And the response file should have size greater than 100
    And the response PDF is passworded
    And the response status code should be 200

  @positive @watermark
  Scenario: Add watermark
    Given I generate a PDF file as "fileInput"
    And the pdf contains 3 pages
    And the request data includes
      | parameter | value |
      | watermarkType | text |
      | watermarkText | Sample Watermark |
      | fontSize | 30 |
      | rotation | 45 |
      | opacity | 0.5 |
      | widthSpacer | 50 |
      | heightSpacer | 50 |
    When I send the API request to the endpoint "/api/v1/security/add-watermark"
    Then the response content type should be "application/pdf"
    And the response file should have size greater than 100
    And the response status code should be 200

  @positive
  Scenario: Remove blank pages
    Given I generate a PDF file as "fileInput"
    And the pdf contains 3 blank pages
    And the request data includes
      | parameter | value |
      | threshold | 90 |
      | whitePercent | 99.9 |
    When I send the API request to the endpoint "/api/v1/misc/remove-blanks"
    Then the response content type should be "application/octet-stream"
    And the response file should have extension ".zip"
    And the response ZIP should contain 1 files
    And the response file should have size greater than 0

  @positive @flatten
  Scenario: Flatten PDF
    Given I generate a PDF file as "fileInput"
    And the request data includes
      | parameter | value |
      | flattenOnlyForms | false |
    When I send the API request to the endpoint "/api/v1/misc/flatten"
    Then the response content type should be "application/pdf"
    And the response file should have size greater than 0
    And the response status code should be 200

  @positive @metadata
  Scenario: Update metadata
    Given I generate a PDF file as "fileInput"
    And the request data includes
      | parameter | value |
      | author | John Doe |
      | title | Sample Title |
      | subject | Sample Subject |
      | keywords | sample, test |
      | producer | Test Producer |
    When I send the API request to the endpoint "/api/v1/misc/update-metadata"
    Then the response content type should be "application/pdf"
    And the response file should have size greater than 0
    And the response PDF metadata should include "Author" as "John Doe"
    And the response PDF metadata should include "Keywords" as "sample, test"
    And the response PDF metadata should include "Subject" as "Sample Subject"
    And the response PDF metadata should include "Title" as "Sample Title"
    And the response status code should be 200

View File

@@ -1,223 +0,0 @@
Feature: API Validation

  @libre @positive
  Scenario: Repair PDF
    Given I generate a PDF file as "fileInput"
    When I send the API request to the endpoint "/api/v1/misc/repair"
    Then the response content type should be "application/pdf"
    And the response file should have size greater than 0
    And the response status code should be 200

  @ocr @positive
  Scenario: Process PDF with OCR
    Given I generate a PDF file as "fileInput"
    And the request data includes
      | parameter | value |
      | languages | eng |
      | sidecar | false |
      | deskew | true |
      | clean | true |
      | cleanFinal | true |
      | ocrType | Normal |
      | ocrRenderType | hocr |
      | removeImagesAfter | false |
    When I send the API request to the endpoint "/api/v1/misc/ocr-pdf"
    Then the response content type should be "application/pdf"
    And the response file should have size greater than 0
    And the response status code should be 200

  @ocr @positive
  Scenario: Extract Image Scans
    Given I generate a PDF file as "fileInput"
    And the pdf contains 3 images of size 300x300 on 2 pages
    And the request data includes
      | parameter | value |
      | angleThreshold | 5 |
      | tolerance | 20 |
      | minArea | 8000 |
      | minContourArea | 500 |
      | borderSize | 1 |
    When I send the API request to the endpoint "/api/v1/misc/extract-image-scans"
    Then the response content type should be "application/octet-stream"
    And the response file should have extension ".zip"
    And the response ZIP should contain 2 files
    And the response file should have size greater than 0
    And the response status code should be 200

  @ocr @negative
  Scenario: Process PDF with text and OCR with type normal
    Given I generate a PDF file as "fileInput"
    And the pdf contains 3 pages with random text
    And the request data includes
      | parameter | value |
      | languages | eng |
      | sidecar | false |
      | deskew | true |
      | clean | true |
      | cleanFinal | true |
      | ocrType | Normal |
      | ocrRenderType | hocr |
      | removeImagesAfter | false |
    When I send the API request to the endpoint "/api/v1/misc/ocr-pdf"
    Then the response status code should be 500

  @ocr @positive
  Scenario: Process PDF with OCR
    Given I generate a PDF file as "fileInput"
    And the request data includes
      | parameter | value |
      | languages | eng |
      | sidecar | false |
      | deskew | true |
      | clean | true |
      | cleanFinal | true |
      | ocrType | Force |
      | ocrRenderType | hocr |
      | removeImagesAfter | false |
    When I send the API request to the endpoint "/api/v1/misc/ocr-pdf"
    Then the response content type should be "application/pdf"
    And the response file should have size greater than 0
    And the response status code should be 200

  @ocr @positive
  Scenario: Process PDF with OCR with sidecar
    Given I generate a PDF file as "fileInput"
    And the request data includes
      | parameter | value |
      | languages | eng |
      | sidecar | true |
      | deskew | true |
      | clean | true |
      | cleanFinal | true |
      | ocrType | Force |
      | ocrRenderType | hocr |
      | removeImagesAfter | false |
    When I send the API request to the endpoint "/api/v1/misc/ocr-pdf"
    Then the response content type should be "application/octet-stream"
    And the response file should have extension ".zip"
    And the response ZIP should contain 2 files
    And the response file should have size greater than 0
    And the response status code should be 200

  @libre @positive
  Scenario Outline: Convert PDF to various word formats
    Given I generate a PDF file as "fileInput"
    And the pdf contains 3 pages with random text
    And the request data includes
      | parameter | value |
      | outputFormat | <format> |
    When I send the API request to the endpoint "/api/v1/convert/pdf/word"
    Then the response status code should be 200
    And the response file should have size greater than 100
    And the response file should have extension "<extension>"

    Examples:
      | format | extension |
      | docx | .docx |
      | odt | .odt |
      | doc | .doc |

  @ocr
  Scenario: PDFA
    Given I use an example file at "exampleFiles/pdfa2.pdf" as parameter "fileInput"
    And the request data includes
      | parameter | value |
      | outputFormat | pdfa |
    When I send the API request to the endpoint "/api/v1/convert/pdf/pdfa"
    Then the response status code should be 200
    And the response file should have extension ".pdf"
    And the response file should have size greater than 100

  @ocr
  Scenario: PDFA1
    Given I use an example file at "exampleFiles/pdfa1.pdf" as parameter "fileInput"
    And the request data includes
      | parameter | value |
      | outputFormat | pdfa-1 |
    When I send the API request to the endpoint "/api/v1/convert/pdf/pdfa"
    Then the response status code should be 200
    And the response file should have extension ".pdf"
    And the response file should have size greater than 100

  @compress @ghostscript @positive
  Scenario: Compress
    Given I use an example file at "exampleFiles/ghost3.pdf" as parameter "fileInput"
    And the request data includes
      | parameter | value |
      | optimizeLevel | 4 |
    When I send the API request to the endpoint "/api/v1/misc/compress-pdf"
    Then the response status code should be 200
    And the response file should have extension ".pdf"
    And the response file should have size greater than 100

  @compress @ghostscript @positive
  Scenario: Compress
    Given I use an example file at "exampleFiles/ghost2.pdf" as parameter "fileInput"
    And the request data includes
      | parameter | value |
      | optimizeLevel | 1 |
      | expectedOutputSize | 5KB |
    When I send the API request to the endpoint "/api/v1/misc/compress-pdf"
    Then the response status code should be 200
    And the response file should have extension ".pdf"
And the response file should have size greater than 100
@compress @ghostscript @positive
Scenario: Compress
Given I use an example file at "exampleFiles/ghost1.pdf" as parameter "fileInput"
And the request data includes
| parameter | value |
| optimizeLevel | 1 |
| expectedOutputSize | 5KB |
When I send the API request to the endpoint "/api/v1/misc/compress-pdf"
Then the response status code should be 200
And the response file should have extension ".pdf"
And the response file should have size greater than 100
@libre @positive
Scenario Outline: Convert PDF to various types
Given I generate a PDF file as "fileInput"
And the pdf contains 3 pages with random text
And the request data includes
| parameter | value |
| outputFormat | <format> |
When I send the API request to the endpoint "/api/v1/convert/pdf/<type>"
Then the response status code should be 200
And the response file should have size greater than 100
And the response file should have extension "<extension>"
Examples:
| type | format | extension |
| text | rtf | .rtf |
| text | txt | .txt |
| presentation | ppt | .ppt |
| presentation | pptx | .pptx |
| presentation | odp | .odp |
| html | html | .zip |
@libre @positive @topdf
Scenario Outline: Convert PDF to various types
Given I use an example file at "exampleFiles/example<extension>" as parameter "fileInput"
When I send the API request to the endpoint "/api/v1/convert/file/pdf"
Then the response status code should be 200
And the response file should have size greater than 100
And the response file should have extension ".pdf"
Examples:
| extension |
| .docx |
| .odp |
| .odt |
| .pptx |
| .rtf |
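The conversion outlines above all share the same request shape; a minimal sketch for the file-to-PDF case, assuming a local instance and an example.docx in the working directory (both file names are placeholders):

import requests

with open("example.docx", "rb") as source:
    response = requests.post(
        "http://localhost:8080/api/v1/convert/file/pdf",
        files={"fileInput": ("example.docx", source)},
    )
response.raise_for_status()
with open("example.pdf", "wb") as out:
    out.write(response.content)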

View File

@@ -1,114 +0,0 @@
@general
Feature: API Validation
@split-pdf-by-sections @positive
Scenario Outline: split-pdf-by-sections with different parameters
Given I generate a PDF file as "fileInput"
And the pdf contains 2 pages
And the request data includes
| parameter | value |
| horizontalDivisions | <horizontalDivisions> |
| verticalDivisions | <verticalDivisions> |
| merge | true |
When I send the API request to the endpoint "/api/v1/general/split-pdf-by-sections"
Then the response content type should be "application/pdf"
And the response file should have size greater than 200
And the response status code should be 200
And the response PDF should contain <page_count> pages
Examples:
| horizontalDivisions | verticalDivisions | page_count |
| 0 | 1 | 4 |
| 1 | 1 | 8 |
| 1 | 2 | 12 |
| 2 | 2 | 18 |
@split-pdf-by-pages @positive
Scenario Outline: split-pdf-by-pages with different parameters
Given I generate a PDF file as "fileInput"
And the pdf contains 20 pages
And the request data includes
| parameter | value |
| fileInput | fileInput |
| pageNumbers | <pageNumbers> |
When I send the API request to the endpoint "/api/v1/general/split-pages"
Then the response content type should be "application/octet-stream"
And the response status code should be 200
And the response file should have size greater than 200
And the response ZIP should contain <file_count> files
Examples:
| pageNumbers | file_count |
| 1,3,5-9 | 8 |
| all | 20 |
| 2n+1 | 11 |
| 3n | 7 |
@split-pdf-by-size-or-count @positive
Scenario Outline: split-pdf-by-size-or-count with different parameters
Given I generate a PDF file as "fileInput"
And the pdf contains 20 pages
And the request data includes
| parameter | value |
| fileInput | fileInput |
| splitType | <splitType> |
| splitValue | <splitValue> |
When I send the API request to the endpoint "/api/v1/general/split-by-size-or-count"
Then the response content type should be "application/octet-stream"
And the response status code should be 200
And the response file should have size greater than 200
And the response ZIP file should contain <doc_count> documents each having <pages_per_doc> pages
Examples:
| splitType | splitValue | doc_count | pages_per_doc |
| 1 | 5 | 4 | 5 |
| 2 | 2 | 2 | 10 |
| 2 | 4 | 4 | 5 |
| 1 | 10 | 2 | 10 |
@extract-images
Scenario Outline: Extract Image Scans duplicates
Given I use an example file at "exampleFiles/images.pdf" as parameter "fileInput"
And the request data includes
| parameter | value |
| format | <format> |
When I send the API request to the endpoint "/api/v1/misc/extract-images"
Then the response content type should be "application/octet-stream"
And the response file should have extension ".zip"
And the response ZIP should contain 2 files
And the response file should have size greater than 0
And the response status code should be 200
Examples:
| format |
| png |
| gif |
| jpeg |
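The split and extract scenarios return a ZIP archive rather than a single PDF; a small sketch that mirrors the split-pages expectations above, assuming a local instance and a 20-page input.pdf (the file name is a placeholder):

import io
import zipfile
import requests

with open("input.pdf", "rb") as pdf:
    response = requests.post(
        "http://localhost:8080/api/v1/general/split-pages",
        files={"fileInput": ("input.pdf", pdf, "application/pdf")},
        data={"pageNumbers": "1,3,5-9"},
    )
response.raise_for_status()
with zipfile.ZipFile(io.BytesIO(response.content)) as archive:
    # The example table above expects 8 documents for "1,3,5-9" on a 20-page file
    print(archive.namelist())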

View File

@@ -1,381 +0,0 @@
import os
import io
import random
import string
import mimetypes
import zipfile
import re
import requests
from behave import given, when, then
from PyPDF2 import PdfWriter, PdfReader
from PyPDF2.errors import PdfReadError
from reportlab.lib.pagesizes import letter
from reportlab.lib.utils import ImageReader
from reportlab.pdfgen import canvas
from PIL import Image, ImageDraw
#########
# GIVEN #
#########
@given('I generate a PDF file as "{fileInput}"')
def step_generate_pdf(context, fileInput):
context.param_name = fileInput
context.file_name = "genericNonCustomisableName.pdf"
writer = PdfWriter()
writer.add_blank_page(width=72, height=72) # Single blank page
with open(context.file_name, 'wb') as f:
writer.write(f)
if not hasattr(context, 'files'):
context.files = {}
context.files[context.param_name] = open(context.file_name, 'rb')
@given('I use an example file at "{filePath}" as parameter "{fileInput}"')
def step_use_example_file(context, filePath, fileInput):
context.param_name = fileInput
context.file_name = filePath.split('/')[-1]
if not hasattr(context, 'files'):
context.files = {}
# Ensure the file exists before opening
try:
example_file = open(filePath, 'rb')
context.files[context.param_name] = example_file
except FileNotFoundError:
raise FileNotFoundError(f"The example file '{filePath}' does not exist.")
@given('the pdf contains {page_count:d} pages')
def step_pdf_contains_pages(context, page_count):
writer = PdfWriter()
for i in range(page_count):
writer.add_blank_page(width=72, height=72)
with open(context.file_name, 'wb') as f:
writer.write(f)
context.files[context.param_name].close()
context.files[context.param_name] = open(context.file_name, 'rb')
# Duplicate for now...
@given('the pdf contains {page_count:d} blank pages')
def step_pdf_contains_blank_pages(context, page_count):
writer = PdfWriter()
for i in range(page_count):
writer.add_blank_page(width=72, height=72)
with open(context.file_name, 'wb') as f:
writer.write(f)
context.files[context.param_name].close()
context.files[context.param_name] = open(context.file_name, 'rb')
def create_black_box_image(file_name, size):
can = canvas.Canvas(file_name, pagesize=size)
width, height = size
can.setFillColorRGB(0, 0, 0)
can.rect(0, 0, width, height, fill=1)
can.showPage()
can.save()
@given(u'the pdf contains {image_count:d} images of size {width:d}x{height:d} on {page_count:d} pages')
def step_impl(context, image_count, width, height, page_count):
context.param_name = "fileInput"
context.file_name = "genericNonCustomisableName.pdf"
create_pdf_with_images_and_boxes(context.file_name, image_count, page_count, width, height)
if not hasattr(context, 'files'):
context.files = {}
context.files[context.param_name] = open(context.file_name, 'rb')
def add_black_boxes_to_image(image):
if isinstance(image, str):
image = Image.open(image)
draw = ImageDraw.Draw(image)
draw.rectangle([(0, 0), image.size], fill=(0, 0, 0)) # Fill image with black
return image
def create_pdf_with_images_and_boxes(file_name, image_count, page_count, image_width, image_height):
page_width, page_height = max(letter[0], image_width), max(letter[1], image_height)
boxes_per_page = image_count // page_count + (1 if image_count % page_count != 0 else 0)
writer = PdfWriter()
box_counter = 0
for page in range(page_count):
packet = io.BytesIO()
can = canvas.Canvas(packet, pagesize=(page_width, page_height))
for i in range(boxes_per_page):
if box_counter >= image_count:
break
# Simulating a dynamic image creation (replace this with your actual image creation logic)
# For demonstration, we'll create a simple black image
dummy_image = Image.new('RGB', (image_width, image_height), color='white') # Create a white image
dummy_image = add_black_boxes_to_image(dummy_image) # Add black boxes
# Convert the PIL Image to bytes to pass to drawImage
image_bytes = io.BytesIO()
dummy_image.save(image_bytes, format='PNG')
image_bytes.seek(0)
# Check if the image fits in the current page dimensions
x = (i % (page_width // image_width)) * image_width
y = page_height - (((i % (page_height // image_height)) + 1) * image_height)
if x + image_width > page_width or y < 0:
break
# Add the image to the PDF
can.drawImage(ImageReader(image_bytes), x, y, width=image_width, height=image_height)
box_counter += 1
can.showPage()
can.save()
packet.seek(0)
new_pdf = PdfReader(packet)
writer.add_page(new_pdf.pages[0])
# Write the PDF to file
with open(file_name, 'wb') as f:
writer.write(f)
# Clean up temporary image files
for i in range(image_count):
temp_image_path = f"temp_image_{i}.png"
if os.path.exists(temp_image_path):
os.remove(temp_image_path)
@given('the pdf contains {image_count:d} images on {page_count:d} pages')
def step_pdf_contains_images(context, image_count, page_count):
if not hasattr(context, 'param_name'):
context.param_name = "default"
context.file_name = "genericNonCustomisableName.pdf"
    # Reuse the image/box helper with a default 100x100 image size for the generated boxes
    create_pdf_with_images_and_boxes(context.file_name, image_count, page_count, 100, 100)
if not hasattr(context, 'files'):
context.files = {}
if context.param_name in context.files:
context.files[context.param_name].close()
context.files[context.param_name] = open(context.file_name, 'rb')
@given('the pdf contains {page_count:d} pages with random text')
def step_pdf_contains_pages_with_random_text(context, page_count):
buffer = io.BytesIO()
c = canvas.Canvas(buffer, pagesize=letter)
width, height = letter
for _ in range(page_count):
text = ''.join(random.choices(string.ascii_letters + string.digits, k=100))
c.drawString(100, height - 100, text)
c.showPage()
c.save()
with open(context.file_name, 'wb') as f:
f.write(buffer.getvalue())
context.files[context.param_name].close()
context.files[context.param_name] = open(context.file_name, 'rb')
@given('the pdf pages all contain the text "{text}"')
def step_pdf_pages_contain_text(context, text):
buffer = io.BytesIO()
c = canvas.Canvas(buffer, pagesize=letter)
width, height = letter
for _ in range(len(PdfReader(context.file_name).pages)):
c.drawString(100, height - 100, text)
c.showPage()
c.save()
with open(context.file_name, 'wb') as f:
f.write(buffer.getvalue())
context.files[context.param_name].close()
context.files[context.param_name] = open(context.file_name, 'rb')
@given('the pdf is encrypted with password "{password}"')
def step_encrypt_pdf(context, password):
writer = PdfWriter()
reader = PdfReader(context.file_name)
for i in range(len(reader.pages)):
writer.add_page(reader.pages[i])
writer.encrypt(password)
with open(context.file_name, 'wb') as f:
writer.write(f)
context.files[context.param_name].close()
context.files[context.param_name] = open(context.file_name, 'rb')
@given('the request data is')
def step_request_data(context):
context.request_data = eval(context.text)
@given('the request data includes')
def step_request_data_table(context):
context.request_data = {row['parameter']: row['value'] for row in context.table}
@given('save the generated PDF file as "{filename}" for debugging')
def save_generated_pdf(context, filename):
with open(filename, 'wb') as f:
f.write(context.files[context.param_name].read())
print(f"Saved generated PDF content to {filename}")
########
# WHEN #
########
@when('I send a GET request to "{endpoint}"')
def step_send_get_request(context, endpoint):
base_url = "http://localhost:8080"
full_url = f"{base_url}{endpoint}"
response = requests.get(full_url)
context.response = response
@when('I send a GET request to "{endpoint}" with parameters')
def step_send_get_request_with_params(context, endpoint):
base_url = "http://localhost:8080"
params = {row['parameter']: row['value'] for row in context.table}
full_url = f"{base_url}{endpoint}"
response = requests.get(full_url, params=params)
context.response = response
@when('I send the API request to the endpoint "{endpoint}"')
def step_send_api_request(context, endpoint):
url = f"http://localhost:8080{endpoint}"
files = context.files if hasattr(context, 'files') else {}
if not hasattr(context, 'request_data') or context.request_data is None:
context.request_data = {}
form_data = []
for key, value in context.request_data.items():
form_data.append((key, (None, value)))
for key, file in files.items():
mime_type, _ = mimetypes.guess_type(file.name)
mime_type = mime_type or 'application/octet-stream'
print(f"form_data {file.name} with {mime_type}")
form_data.append((key, (file.name, file, mime_type)))
response = requests.post(url, files=form_data)
context.response = response
########
# THEN #
########
@then('the response content type should be "{content_type}"')
def step_check_response_content_type(context, content_type):
actual_content_type = context.response.headers.get('Content-Type', '')
assert actual_content_type.startswith(content_type), f"Expected {content_type} but got {actual_content_type}. Response content: {context.response.content}"
@then('the response file should have size greater than {size:d}')
def step_check_response_file_size(context, size):
response_file = io.BytesIO(context.response.content)
assert len(response_file.getvalue()) > size
@then('the response PDF is not passworded')
def step_check_response_pdf_not_passworded(context):
response_file = io.BytesIO(context.response.content)
reader = PdfReader(response_file)
assert not reader.is_encrypted
@then('the response PDF is passworded')
def step_check_response_pdf_passworded(context):
response_file = io.BytesIO(context.response.content)
try:
reader = PdfReader(response_file)
assert reader.is_encrypted
except PdfReadError as e:
raise AssertionError(f"Failed to read PDF: {str(e)}. Response content: {context.response.content}")
except Exception as e:
raise AssertionError(f"An error occurred: {str(e)}. Response content: {context.response.content}")
@then('the response status code should be {status_code:d}')
def step_check_response_status_code(context, status_code):
assert context.response.status_code == status_code, f"Expected status code {status_code} but got {context.response.status_code}"
@then('the response should contain error message "{message}"')
def step_check_response_error_message(context, message):
response_json = context.response.json()
assert response_json.get('error') == message, f"Expected error message '{message}' but got '{response_json.get('error')}'"
@then('the response PDF should contain {page_count:d} pages')
def step_check_response_pdf_page_count(context, page_count):
response_file = io.BytesIO(context.response.content)
reader = PdfReader(response_file)
assert len(reader.pages) == page_count, f"Expected {page_count} pages but got {len(reader.pages)} pages"
@then('the response PDF metadata should include "{metadata_key}" as "{metadata_value}"')
def step_check_response_pdf_metadata(context, metadata_key, metadata_value):
response_file = io.BytesIO(context.response.content)
reader = PdfReader(response_file)
metadata = reader.metadata
assert metadata.get("/" + metadata_key) == metadata_value, f"Expected {metadata_key} to be '{metadata_value}' but got '{metadata.get(metadata_key)}'"
@then('the response file should have extension "{extension}"')
def step_check_response_file_extension(context, extension):
content_disposition = context.response.headers.get('Content-Disposition', '')
filename = ""
if content_disposition:
parts = content_disposition.split(';')
for part in parts:
if part.strip().startswith('filename'):
filename = part.split('=')[1].strip().strip('"')
break
assert filename.endswith(extension), f"Expected file extension {extension} but got {filename}. Response content: {context.response.content}"
@then('save the response file as "{filename}" for debugging')
def step_save_response_file(context, filename):
with open(filename, 'wb') as f:
f.write(context.response.content)
print(f"Saved response content to {filename}")
@then('the response ZIP should contain {file_count:d} files')
def step_check_response_zip_file_count(context, file_count):
response_file = io.BytesIO(context.response.content)
with zipfile.ZipFile(io.BytesIO(response_file.getvalue())) as zip_file:
actual_file_count = len(zip_file.namelist())
assert actual_file_count == file_count, f"Expected {file_count} files but got {actual_file_count} files"
@then('the response ZIP file should contain {doc_count:d} documents each having {pages_per_doc:d} pages')
def step_check_response_zip_doc_page_count(context, doc_count, pages_per_doc):
response_file = io.BytesIO(context.response.content)
with zipfile.ZipFile(io.BytesIO(response_file.getvalue())) as zip_file:
actual_doc_count = len(zip_file.namelist())
assert actual_doc_count == doc_count, f"Expected {doc_count} documents but got {actual_doc_count} documents"
for file_name in zip_file.namelist():
with zip_file.open(file_name) as pdf_file:
reader = PdfReader(pdf_file)
actual_pages_per_doc = len(reader.pages)
assert actual_pages_per_doc == pages_per_doc, f"Expected {pages_per_doc} pages per document but got {actual_pages_per_doc} pages in document {file_name}"
@then('the JSON value of "{key}" should be "{expected_value}"')
def step_check_json_value(context, key, expected_value):
actual_value = context.response.json().get(key)
assert actual_value == expected_value, \
f"Expected JSON value for '{key}' to be '{expected_value}' but got '{actual_value}'"
@then('JSON list entry containing "{identifier_key}" as "{identifier_value}" should have "{target_key}" as "{target_value}"')
def step_check_json_list_entry(context, identifier_key, identifier_value, target_key, target_value):
json_response = context.response.json()
for entry in json_response:
if entry.get(identifier_key) == identifier_value:
assert entry.get(target_key) == target_value, \
f"Expected {target_key} to be {target_value} in entry where {identifier_key} is {identifier_value}, but found {entry.get(target_key)}"
break
else:
raise AssertionError(f"No entry with {identifier_key} as {identifier_value} found")
@then('the response should match the regex "{pattern}"')
def step_response_matches_regex(context, pattern):
response_text = context.response.text
assert re.match(pattern, response_text), \
f"Response '{response_text}' does not match the expected pattern '{pattern}'"

View File

@@ -1,5 +0,0 @@
behave
requests
PyPDF2
reportlab
PyCryptodome
pillow

Binary file not shown.

Before: 50 KiB
After: 53 KiB

File diff suppressed because one or more lines are too long

Before: 9.4 KiB
After: 14 KiB

View File

@@ -1 +1,110 @@
<svg xmlns="http://www.w3.org/2000/svg" xmlns:svg="http://www.w3.org/2000/svg" id="Layer_1" x="0" y="0" version="1.1" viewBox="0 0 512 512" style="enable-background:new 0 0 512 512" xml:space="preserve"><defs id="defs173"><linearGradient id="XMLID_5_" x1="304.496" x2="316.036" y1="422.91" y2="326.263" gradientUnits="userSpaceOnUse"><stop offset="0" style="stop-color:#dcf1f3" id="stop156"/><stop offset="1" style="stop-color:#c2c2c9" id="stop158"/></linearGradient></defs><style id="style150" type="text/css">.st1{fill:#c02223}.st2{fill:#882425}.st3{fill:url(#XMLID_5_)}.st4{fill:url(#XMLID_7_)}</style><g id="XMLID_4_"><path id="XMLID_131_" d="M 347.01402,14.355825 98.978019,69.02261 C 73.825483,74.547445 55.942464,96.792175 55.942464,122.52628 v 315.06096 c 0,22.39012 16.719895,41.14548 38.819234,43.76251 L 224.8861,498.36042 339.48636,384.26465 455.76603,265.15425 453.73057,84.870162 C 453.43979,62.916214 433.08513,46.632491 411.71274,51.284984 l -28.78729,6.251786 0.14539,-13.666697 C 383.36162,24.678542 365.62399,10.284894 347.01402,14.355825 Z" class="st1" style="stroke-width:1.45391"/><path id="XMLID_117_" d="m 383.21622,57.53677 v 285.8375 L 456.05681,265.00885 454.02135,78.763767 C 453.87595,59.863016 436.28372,45.905539 417.81914,49.97647 Z" class="st2" style="stroke-width:1.45391"/><polygon id="XMLID_18_" points="234.7 422.6 368.5 387.7 393.5 262.2" class="st3" style="fill:url(#XMLID_5_)" transform="matrix(1.4556308,0,0,1.4548265,-116.73161,-116.45231)"/><linearGradient id="XMLID_7_" x1="223.084" x2="241.417" y1="372.756" y2="114.557" gradientTransform="matrix(1.4539039,0,0,1.4539039,-116.19976,-116.20474)" gradientUnits="userSpaceOnUse"><stop offset="0" style="stop-color:#dcf1f3" id="stop163"/><stop offset="1" style="stop-color:#c2c2c9" id="stop165"/></linearGradient><path id="XMLID_6_" d="m 282.89686,214.84917 c 0,0 -22.24473,-28.93269 -38.67384,-36.78377 -10.46811,-4.94327 -26.02489,-6.83335 -38.23768,-0.72695 -18.02841,9.0142 -19.91848,34.31213 -3.34397,44.34406 3.92553,2.47165 9.15959,4.50711 15.99294,6.10641 36.63838,8.43264 97.12077,25.87949 89.70587,96.10304 0,0 -4.21633,65.86185 -73.56753,73.42215 -12.2128,1.30851 -24.57098,0.43617 -36.493,-2.32625 -16.42911,-3.63476 -45.50719,-11.04967 -59.75545,-19.91849 l -2.61703,-75.16682 h 6.97875 c 0,0 13.81208,33.43978 53.06749,49.57812 7.26952,2.90781 15.26599,4.07093 22.97168,2.90781 9.74116,-1.45391 21.22699,-6.68796 25.87949,-22.53551 0,0 7.85108,-23.11707 -32.85823,-35.76604 -32.56744,-10.17733 -63.24481,-20.64543 -75.89378,-54.95757 -5.961,-16.28371 -6.97874,-34.31212 -2.90781,-51.61358 5.37944,-22.53551 20.79082,-54.23062 64.40794,-67.89732 0,0 57.28381,-15.55677 96.53922,5.52484 l -1.74468,89.70587 z" class="st4" style="fill:url(#XMLID_7_);stroke-width:1.45391"/></g></svg>
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Generator: Adobe Illustrator 19.0.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg
version="1.1"
id="Layer_1"
x="0px"
y="0px"
viewBox="0 0 512 512"
style="enable-background:new 0 0 512 512;"
xml:space="preserve"
sodipodi:docname="favicon.svg"
inkscape:version="1.2.2 (732a01da63, 2022-12-09)"
inkscape:export-filename="favicon.png"
inkscape:export-xdpi="96"
inkscape:export-ydpi="96"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg"><defs
id="defs173">
<linearGradient
id="XMLID_5_"
gradientUnits="userSpaceOnUse"
x1="304.496"
y1="422.9102"
x2="316.036"
y2="326.2626">
<stop
offset="0"
style="stop-color:#DCF1F3"
id="stop156" />
<stop
offset="1"
style="stop-color:#C2C2C9"
id="stop158" />
</linearGradient>
</defs><sodipodi:namedview
id="namedview171"
pagecolor="#ffffff"
bordercolor="#000000"
borderopacity="0.25"
inkscape:showpageshadow="2"
inkscape:pageopacity="0.0"
inkscape:pagecheckerboard="0"
inkscape:deskcolor="#d1d1d1"
showgrid="false"
inkscape:zoom="1.4142136"
inkscape:cx="219.91021"
inkscape:cy="232.63813"
inkscape:window-width="3840"
inkscape:window-height="2054"
inkscape:window-x="2869"
inkscape:window-y="-11"
inkscape:window-maximized="1"
inkscape:current-layer="XMLID_4_" />
<style
type="text/css"
id="style150">
.st0{fill:#FFFFFF;}
.st1{fill:#C02223;}
.st2{fill:#882425;}
.st3{fill:url(#XMLID_5_);}
.st4{fill:url(#XMLID_7_);}
</style>
<g
id="XMLID_4_">
<path
id="XMLID_131_"
class="st1"
d="M 347.01402,14.355825 98.978019,69.02261 C 73.825483,74.547445 55.942464,96.792175 55.942464,122.52628 v 315.06096 c 0,22.39012 16.719895,41.14548 38.819234,43.76251 L 224.8861,498.36042 339.48636,384.26465 455.76603,265.15425 453.73057,84.870162 C 453.43979,62.916214 433.08513,46.632491 411.71274,51.284984 l -28.78729,6.251786 0.14539,-13.666697 C 383.36162,24.678542 365.62399,10.284894 347.01402,14.355825 Z"
sodipodi:nodetypes="ccssccccccccc"
style="stroke-width:1.45391" /><path
id="XMLID_117_"
class="st2"
d="m 383.21622,57.53677 v 285.8375 L 456.05681,265.00885 454.02135,78.763767 C 453.87595,59.863016 436.28372,45.905539 417.81914,49.97647 Z"
style="stroke-width:1.45391" /><polygon
id="XMLID_18_"
class="st3"
points="234.7,422.6 368.5,387.7 393.5,262.2 "
style="fill:url(#XMLID_5_)"
transform="matrix(1.4556308,0,0,1.4548265,-116.73161,-116.45231)" />
<linearGradient
id="XMLID_7_"
gradientUnits="userSpaceOnUse"
x1="223.0838"
y1="372.7559"
x2="241.4174"
y2="114.557"
gradientTransform="matrix(1.4539039,0,0,1.4539039,-116.19976,-116.20474)">
<stop
offset="0"
style="stop-color:#DCF1F3"
id="stop163" />
<stop
offset="1"
style="stop-color:#C2C2C9"
id="stop165" />
</linearGradient>
<path
id="XMLID_6_"
class="st4"
d="m 282.89686,214.84917 c 0,0 -22.24473,-28.93269 -38.67384,-36.78377 -10.46811,-4.94327 -26.02489,-6.83335 -38.23768,-0.72695 -18.02841,9.0142 -19.91848,34.31213 -3.34397,44.34406 3.92553,2.47165 9.15959,4.50711 15.99294,6.10641 36.63838,8.43264 97.12077,25.87949 89.70587,96.10304 0,0 -4.21633,65.86185 -73.56753,73.42215 -12.2128,1.30851 -24.57098,0.43617 -36.493,-2.32625 -16.42911,-3.63476 -45.50719,-11.04967 -59.75545,-19.91849 l -2.61703,-75.16682 h 6.97875 c 0,0 13.81208,33.43978 53.06749,49.57812 7.26952,2.90781 15.26599,4.07093 22.97168,2.90781 9.74116,-1.45391 21.22699,-6.68796 25.87949,-22.53551 0,0 7.85108,-23.11707 -32.85823,-35.76604 -32.56744,-10.17733 -63.24481,-20.64543 -75.89378,-54.95757 -5.961,-16.28371 -6.97874,-34.31212 -2.90781,-51.61358 5.37944,-22.53551 20.79082,-54.23062 64.40794,-67.89732 0,0 57.28381,-15.55677 96.53922,5.52484 l -1.74468,89.70587 z"
style="fill:url(#XMLID_7_);stroke-width:1.45391" />
</g>
</svg>

Before: 2.7 KiB
After: 4.0 KiB

View File

@@ -1,11 +1,12 @@
version: '3.3'
services:
stirling-pdf:
container_name: Stirling-PDF-Security-Fat
image: frooodle/s-pdf:latest-fat
container_name: Stirling-PDF-Lite-Security
image: frooodle/s-pdf:latest-lite
deploy:
resources:
limits:
memory: 4G
memory: 2G
healthcheck:
test: ["CMD-SHELL", "curl -f http://localhost:8080/api/v1/info/status | grep -q 'UP' && curl -fL http://localhost:8080/ | grep -q 'Please sign in'"]
interval: 5s
@@ -14,19 +15,16 @@ services:
ports:
- 8080:8080
volumes:
- /stirling/latest/data:/usr/share/tessdata:rw
- /stirling/latest/data:/usr/share/tesseract-ocr/5/tessdata:rw
- /stirling/latest/config:/configs:rw
- /stirling/latest/logs:/logs:rw
environment:
DOCKER_ENABLE_SECURITY: "true"
SECURITY_ENABLELOGIN: "true"
PUID: 1002
PGID: 1002
UMASK: "022"
SYSTEM_DEFAULTLOCALE: en-US
UI_APPNAME: Stirling-PDF
UI_HOMEDESCRIPTION: Demo site for Stirling-PDF Latest-fat with Security
UI_APPNAMENAVBAR: Stirling-PDF Latest-fat
UI_APPNAME: Stirling-PDF-Lite
UI_HOMEDESCRIPTION: Demo site for Stirling-PDF-Lite Latest with Security
UI_APPNAMENAVBAR: Stirling-PDF-Lite Latest
SYSTEM_MAXFILESIZE: "100"
METRICS_ENABLED: "true"
SYSTEM_GOOGLEVISIBILITY: "true"

View File

@@ -0,0 +1,30 @@
version: '3.3'
services:
stirling-pdf:
container_name: Stirling-PDF-Lite
image: frooodle/s-pdf:latest-lite
deploy:
resources:
limits:
memory: 2G
healthcheck:
test: ["CMD-SHELL", "curl -f http://localhost:8080/api/v1/info/status | grep -q 'UP' && curl -fL http://localhost:8080/ | grep -qv 'Please sign in'"]
interval: 5s
timeout: 10s
retries: 16
ports:
- 8080:8080
volumes:
- /stirling/latest/config:/configs:rw
- /stirling/latest/logs:/logs:rw
environment:
DOCKER_ENABLE_SECURITY: "false"
SECURITY_ENABLELOGIN: "false"
SYSTEM_DEFAULTLOCALE: en-US
UI_APPNAME: Stirling-PDF-Lite
UI_HOMEDESCRIPTION: Demo site for Stirling-PDF-Lite Latest
UI_APPNAMENAVBAR: Stirling-PDF-Lite Latest
SYSTEM_MAXFILESIZE: "100"
METRICS_ENABLED: "true"
SYSTEM_GOOGLEVISIBILITY: "true"
restart: on-failure:5

View File

@@ -1,41 +0,0 @@
services:
stirling-pdf:
container_name: Stirling-PDF-Security
image: frooodle/s-pdf:latest
deploy:
resources:
limits:
memory: 4G
healthcheck:
test: ["CMD-SHELL", "curl -f http://localhost:8080/api/v1/info/status | grep -q 'UP' && curl -fL http://localhost:8080/ | grep -q 'Please sign in'"]
interval: 5s
timeout: 10s
retries: 16
ports:
- "8080:8080"
volumes:
- /stirling/latest/data:/usr/share/tessdata:rw
- /stirling/latest/config:/configs:rw
- /stirling/latest/logs:/logs:rw
environment:
DOCKER_ENABLE_SECURITY: "true"
SECURITY_ENABLELOGIN: "true"
SECURITY_OAUTH2_ENABLED: "true"
SECURITY_OAUTH2_AUTOCREATEUSER: "true" # This is set to true to allow auto-creation of non-existing users in Stirling-PDF
SECURITY_OAUTH2_ISSUER: "https://accounts.google.com" # Change with any other provider that supports OpenID Connect Discovery (/.well-known/openid-configuration) end-point
SECURITY_OAUTH2_CLIENTID: "<YOUR CLIENT ID>.apps.googleusercontent.com" # Client ID from your provider
SECURITY_OAUTH2_CLIENTSECRET: "<YOUR CLIENT SECRET>" # Client Secret from your provider
SECURITY_OAUTH2_SCOPES: "openid,profile,email" # Expected OAuth2 Scope
SECURITY_OAUTH2_USEASUSERNAME: "email" # Default is 'email'; custom fields can be used as the username
SECURITY_OAUTH2_PROVIDER: "google" # Set this to your OAuth provider's name, e.g., 'google' or 'keycloak'
PUID: 1002
PGID: 1002
UMASK: "022"
SYSTEM_DEFAULTLOCALE: en-US
UI_APPNAME: Stirling-PDF
UI_HOMEDESCRIPTION: Demo site for Stirling-PDF Latest with Security
UI_APPNAMENAVBAR: Stirling-PDF Latest
SYSTEM_MAXFILESIZE: "100"
METRICS_ENABLED: "true"
SYSTEM_GOOGLEVISIBILITY: "true"
restart: on-failure:5

View File

@@ -1,3 +1,4 @@
version: '3.3'
services:
stirling-pdf:
container_name: Stirling-PDF-Security
@@ -12,17 +13,14 @@ services:
timeout: 10s
retries: 16
ports:
- "8080:8080"
- 8080:8080
volumes:
- /stirling/latest/data:/usr/share/tessdata:rw
- /stirling/latest/data:/usr/share/tesseract-ocr/5/tessdata:rw
- /stirling/latest/config:/configs:rw
- /stirling/latest/logs:/logs:rw
environment:
DOCKER_ENABLE_SECURITY: "true"
SECURITY_ENABLELOGIN: "true"
PUID: 1002
PGID: 1002
UMASK: "022"
SYSTEM_DEFAULTLOCALE: en-US
UI_APPNAME: Stirling-PDF
UI_HOMEDESCRIPTION: Demo site for Stirling-PDF Latest with Security

View File

@@ -1,3 +1,4 @@
version: '3.3'
services:
stirling-pdf:
container_name: Stirling-PDF-Ultra-Lite-Security
@@ -12,9 +13,9 @@ services:
timeout: 10s
retries: 16
ports:
- "8080:8080"
- 8080:8080
volumes:
- /stirling/latest/data:/usr/share/tessdata:rw
- /stirling/latest/data:/usr/share/tesseract-ocr/5/tessdata:rw
- /stirling/latest/config:/configs:rw
- /stirling/latest/logs:/logs:rw
environment:

View File

@@ -1,3 +1,4 @@
version: '3.3'
services:
stirling-pdf:
container_name: Stirling-PDF-Ultra-Lite
@@ -12,7 +13,7 @@ services:
timeout: 10s
retries: 16
ports:
- "8080:8080"
- 8080:8080
volumes:
- /stirling/latest/config:/configs:rw
- /stirling/latest/logs:/logs:rw

View File

@@ -1,3 +1,4 @@
version: '3.3'
services:
stirling-pdf:
container_name: Stirling-PDF
@@ -12,15 +13,14 @@ services:
timeout: 10s
retries: 16
ports:
- "8080:8080"
- 8080:8080
volumes:
- /stirling/latest/data:/usr/share/tessdata:rw
- /stirling/latest/data:/usr/share/tesseract-ocr/5/tessdata:rw
- /stirling/latest/config:/configs:rw
- /stirling/latest/logs:/logs:rw
environment:
DOCKER_ENABLE_SECURITY: "false"
SECURITY_ENABLELOGIN: "false"
LANGS: "en_GB,en_US,ar_AR,de_DE,fr_FR,es_ES,zh_CN,zh_TW,ca_CA,it_IT,sv_SE,pl_PL,ro_RO,ko_KR,pt_BR,ru_RU,el_GR,hi_IN,hu_HU,tr_TR,id_ID"
SYSTEM_DEFAULTLOCALE: en-US
UI_APPNAME: Stirling-PDF
UI_HOMEDESCRIPTION: Demo site for Stirling-PDF Latest

View File

@@ -1,5 +1,5 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-8.7-bin.zip
distributionUrl=https\://services.gradle.org/distributions/gradle-7.6-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists

BIN
images/DemoGif.gif Normal file

Binary file not shown.

After: 1.4 MiB

Binary file not shown.

Before: 31 KiB
After: 29 KiB

Binary file not shown.

Before: 30 KiB
After: 30 KiB

Binary file not shown.

Before: 44 KiB

BIN
images/settings.png Normal file

Binary file not shown.

After: 12 KiB

Binary file not shown.

Before: 242 KiB

Binary file not shown.

After: 118 KiB

Binary file not shown.

Before: 145 KiB

BIN
images/stirling-home.png Normal file

Binary file not shown.

After: 131 KiB

View File

@@ -1,43 +0,0 @@
{
"name": "OCR images",
"pipeline": [
{
"operation": "/api/v1/convert/img/pdf",
"parameters": {
"fitOption": "fillPage",
"colorType": "color",
"autoRotate": true,
"fileInput": "automated"
}
},
{
"operation": "/api/v1/general/merge-pdfs",
"parameters": {
"sortType": "orderProvided",
"fileInput": "automated"
}
},
{
"operation": "/api/v1/misc/ocr-pdf",
"parameters": {
"languages": [
"eng"
],
"sidecar": false,
"deskew": false,
"clean": false,
"cleanFinal": false,
"ocrType": "skip-text",
"ocrRenderType": "hocr",
"removeImagesAfter": false,
"fileInput": "automated"
}
}
],
"_examples": {
"outputDir": "{outputFolder}/{folderName}",
"outputFileName": "{filename}-{pipelineName}-{date}-{time}"
},
"outputDir": "{outputFolder}",
"outputFileName": "{filename}"
}
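Because the pipeline definition above is plain JSON, its shape can be sanity-checked locally before it is handed to the server; a minimal sketch, assuming the config is saved as ocr-images.json (the file name is a placeholder and no Stirling-PDF endpoint is called):

import json

with open("ocr-images.json", encoding="utf-8") as f:
    config = json.load(f)

# Each pipeline entry is an endpoint path plus its parameter map
for step in config["pipeline"]:
    assert step["operation"].startswith("/api/v1/"), step
    assert isinstance(step["parameters"], dict), step
    # "automated" appears to mark inputs supplied by the pipeline itself rather than a fixed file
    print(step["operation"], "->", step["parameters"].get("fileInput"))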

View File

@@ -6,8 +6,7 @@
"parameters": {
"horizontalDivisions": 2,
"verticalDivisions": 2,
"fileInput": "automated",
"merge": false
"fileInput": "automated"
}
},
{
@@ -31,4 +30,4 @@
},
"outputDir": "{outputFolder}",
"outputFileName": "{filename}"
}
}

View File

@@ -1,217 +0,0 @@
"""A script to update language progress status in README.md based on
properties file comparison.
This script compares the default properties file with the other properties
files in a directory to determine each language's translation progress.
It then updates README.md based on the resulting progress list.
Author: Ludy87
Example:
To use this script, simply run it from command line:
$ python counter_translation.py
""" # noqa: D205
import glob
import os
import re
import tomlkit
import tomlkit.toml_file
def convert_to_multiline(data: tomlkit.TOMLDocument) -> tomlkit.TOMLDocument:
"""Converts 'ignore' and 'missing' arrays to multiline arrays and sorts the first-level keys of the TOML document.
Enhances readability and consistency in the TOML file by ensuring arrays contain unique and sorted entries.
Parameters:
data (tomlkit.TOMLDocument): The original TOML document containing the data.
Returns:
tomlkit.TOMLDocument: A new TOML document with sorted keys and properly formatted arrays.
""" # noqa: D205
sorted_data = tomlkit.document()
for key in sorted(data.keys()):
value = data[key]
if isinstance(value, dict):
new_table = tomlkit.table()
for subkey in ("ignore", "missing"):
if subkey in value:
# Convert the list to a set to remove duplicates, sort it, and convert to multiline for readability
unique_sorted_array = sorted(set(value[subkey]))
array = tomlkit.array()
array.multiline(True)
for item in unique_sorted_array:
array.append(item)
new_table[subkey] = array
sorted_data[key] = new_table
else:
# Add other types of data unchanged
sorted_data[key] = value
return sorted_data
def write_readme(progress_list: list[tuple[str, int]]) -> None:
"""Updates the progress status in the README.md file based
on the provided progress list.
Parameters:
progress_list (list[tuple[str, int]]): A list of tuples containing
language and progress percentage.
Returns:
None
""" # noqa: D205
with open("README.md", encoding="utf-8") as file:
content = file.readlines()
for i, line in enumerate(content[2:], start=2):
for progress in progress_list:
language, value = progress
if language in line:
if match := re.search(r"\!\[(\d+(\.\d+)?)%\]\(.*\)", line):
content[i] = line.replace(
match.group(0),
f"![{value}%](https://geps.dev/progress/{value})",
)
with open("README.md", "w", encoding="utf-8") as file:
file.writelines(content)
def compare_files(
default_file_path, file_paths, ignore_translation_file
) -> list[tuple[str, int]]:
"""Compares the default properties file with other
properties files in the directory.
Parameters:
default_file_path (str): The path to the default properties file.
files_directory (str): The directory containing other properties files.
Returns:
list[tuple[str, int]]: A list of tuples containing
language and progress percentage.
""" # noqa: D205
num_lines = sum(
1
for line in open(default_file_path, encoding="utf-8")
if line.strip() and not line.strip().startswith("#")
)
result_list = []
sort_ignore_translation: tomlkit.TOMLDocument
# read toml
with open(ignore_translation_file, encoding="utf-8") as f:
sort_ignore_translation = tomlkit.parse(f.read())
for file_path in file_paths:
language = (
os.path.basename(file_path)
.split("messages_", 1)[1]
.split(".properties", 1)[0]
)
fails = 0
if "en_GB" in language or "en_US" in language:
result_list.append(("en_GB", 100))
result_list.append(("en_US", 100))
continue
if language not in sort_ignore_translation:
sort_ignore_translation[language] = tomlkit.table()
if (
"ignore" not in sort_ignore_translation[language]
or len(sort_ignore_translation[language].get("ignore", [])) < 1
):
sort_ignore_translation[language]["ignore"] = tomlkit.array(
["language.direction"]
)
# if "missing" not in sort_ignore_translation[language]:
# sort_ignore_translation[language]["missing"] = tomlkit.array()
# elif "language.direction" in sort_ignore_translation[language]["missing"]:
# sort_ignore_translation[language]["missing"].remove("language.direction")
with open(default_file_path, encoding="utf-8") as default_file, open(
file_path, encoding="utf-8"
) as file:
for _ in range(5):
next(default_file)
try:
next(file)
except StopIteration:
fails = num_lines
for line_num, (line_default, line_file) in enumerate(
zip(default_file, file), start=6
):
try:
# Ignoring empty lines and lines start with #
if line_default.strip() == "" or line_default.startswith("#"):
continue
default_key, default_value = line_default.split("=", 1)
file_key, file_value = line_file.split("=", 1)
if (
default_value.strip() == file_value.strip()
and default_key.strip()
not in sort_ignore_translation[language]["ignore"]
):
print(
f"{language}: Line {line_num} is missing the translation."
)
# if default_key.strip() not in sort_ignore_translation[language]["missing"]:
# missing_array = tomlkit.array()
# missing_array.append(default_key.strip())
# missing_array.multiline(True)
# sort_ignore_translation[language]["missing"].extend(missing_array)
fails += 1
# elif default_key.strip() in sort_ignore_translation[language]["ignore"]:
# if default_key.strip() in sort_ignore_translation[language]["missing"]:
# sort_ignore_translation[language]["missing"].remove(default_key.strip())
if default_value.strip() != file_value.strip():
# if default_key.strip() in sort_ignore_translation[language]["missing"]:
# sort_ignore_translation[language]["missing"].remove(default_key.strip())
if (
default_key.strip()
in sort_ignore_translation[language]["ignore"]
):
sort_ignore_translation[language]["ignore"].remove(
default_key.strip()
)
except ValueError:
print(f"{line_default}|{line_file}")
exit(1)
except IndexError:
pass
print(f"{language}: {fails} out of {num_lines} lines are not translated.")
result_list.append(
(
language,
int((num_lines - fails) * 100 / num_lines),
)
)
ignore_translation = convert_to_multiline(sort_ignore_translation)
with open(ignore_translation_file, "w", encoding="utf-8") as file:
file.write(tomlkit.dumps(ignore_translation))
unique_data = list(set(result_list))
unique_data.sort(key=lambda x: x[1], reverse=True)
return unique_data
if __name__ == "__main__":
directory = os.path.join(os.getcwd(), "src", "main", "resources")
messages_file_paths = glob.glob(os.path.join(directory, "messages_*.properties"))
reference_file = os.path.join(directory, "messages_en_GB.properties")
scripts_directory = os.path.join(os.getcwd(), "scripts")
translation_state_file = os.path.join(scripts_directory, "ignore_translation.toml")
write_readme(
compare_files(reference_file, messages_file_paths, translation_state_file)
)
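The percentage written into README.md is essentially the share of non-identical, non-ignored lines; a self-contained sketch of that calculation on two in-memory properties snippets (the keys and values are made up):

default_lines = ["greeting=Hello", "farewell=Goodbye", "language.direction=ltr"]
translated_lines = ["greeting=Hallo", "farewell=Goodbye", "language.direction=ltr"]
ignore = {"language.direction"}

fails = 0
for line_default, line_file in zip(default_lines, translated_lines):
    key, default_value = line_default.split("=", 1)
    _, file_value = line_file.split("=", 1)
    if default_value.strip() == file_value.strip() and key.strip() not in ignore:
        fails += 1  # an unchanged value counts as an untranslated line

progress = int((len(default_lines) - fails) * 100 / len(default_lines))
print(f"{fails} untranslated, progress {progress}%")  # 1 untranslated, progress 66%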

View File

@@ -0,0 +1,37 @@
import cv2
import sys
import argparse
import numpy as np
def is_blank_image(image_path, threshold=10, white_percent=99, white_value=255, blur_size=5):
image = cv2.imread(image_path, cv2.IMREAD_GRAYSCALE)
if image is None:
print(f"Error: Unable to read the image file: {image_path}")
return False
# Apply Gaussian blur to reduce noise
blurred_image = cv2.GaussianBlur(image, (blur_size, blur_size), 0)
_, thresholded_image = cv2.threshold(blurred_image, white_value - threshold, white_value, cv2.THRESH_BINARY)
# Calculate the percentage of white pixels in the thresholded image
white_pixels = np.sum(thresholded_image == white_value)
white_pixel_percentage = (white_pixels / thresholded_image.size) * 100
print(f"Page has white pixel percent of {white_pixel_percentage}")
return white_pixel_percentage >= white_percent
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Detect if an image is considered blank or not.')
parser.add_argument('image_path', help='The path to the image file.')
parser.add_argument('-t', '--threshold', type=int, default=10, help='Threshold for determining white pixels. The default value is 10.')
parser.add_argument('-w', '--white_percent', type=float, default=99, help='The percentage of white pixels for an image to be considered blank. The default value is 99.')
args = parser.parse_args()
blank = is_blank_image(args.image_path, args.threshold, args.white_percent)
# Return code 1: The image is considered blank.
# Return code 0: The image is not considered blank.
sys.exit(int(blank))
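The blank-page check above boils down to counting near-white pixels after thresholding; a self-contained sketch of that core step on a synthetic grayscale image, using numpy only:

import numpy as np

threshold = 10
white_value = 255

# Synthetic "page": white background with a 30x30 black block
page = np.full((100, 100), 255, dtype=np.uint8)
page[10:40, 10:40] = 0

# Pixels at or above (white_value - threshold) count as white
white_pixels = np.sum(page >= white_value - threshold)
white_pixel_percentage = (white_pixels / page.size) * 100
print(f"white pixels: {white_pixel_percentage:.1f}%")  # 91.0% here

is_blank = white_pixel_percentage >= 99
print(is_blank)  # False: the black block keeps this page below the 99% default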

View File

@@ -4,7 +4,7 @@ if [ "$DOCKER_ENABLE_SECURITY" = "true" ] && [ "$VERSION_TAG" != "alpha" ]; then
if [ ! -f app-security.jar ]; then
echo "Trying to download from: https://github.com/Stirling-Tools/Stirling-PDF/releases/download/v$VERSION_TAG/Stirling-PDF-with-login.jar"
curl -L -o app-security.jar https://github.com/Stirling-Tools/Stirling-PDF/releases/download/v$VERSION_TAG/Stirling-PDF-with-login.jar
# If the first download attempt failed, try with the 'v' prefix
if [ $? -ne 0 ]; then
echo "Trying to download from: https://github.com/Stirling-Tools/Stirling-PDF/releases/download/$VERSION_TAG/Stirling-PDF-with-login.jar"
@@ -14,8 +14,6 @@ if [ "$DOCKER_ENABLE_SECURITY" = "true" ] && [ "$VERSION_TAG" != "alpha" ]; then
if [ $? -eq 0 ]; then # checks if curl was successful
rm -f app.jar
ln -s app-security.jar app.jar
chown stirlingpdfuser:stirlingpdfgroup app.jar || true
chmod 755 app.jar || true
fi
fi
fi

View File

@@ -1,261 +0,0 @@
[ar_AR]
ignore = [
'language.direction',
]
[bg_BG]
ignore = [
'language.direction',
]
[ca_CA]
ignore = [
'PDFToText.tags',
'adminUserSettings.admin',
'language.direction',
'survey.button',
'watermark.type.1',
]
[cs_CZ]
ignore = [
'info',
'language.direction',
'pipeline.header',
'text',
]
[da_DK]
ignore = [
'language.direction',
]
[de_DE]
ignore = [
'AddStampRequest.alphabet',
'AddStampRequest.position',
'PDFToBook.selectText.1',
'PDFToText.tags',
'addPageNumbers.selectText.3',
'alphabet',
'certSign.name',
'language.direction',
'licenses.version',
'pipeline.title',
'pipelineOptions.pipelineHeader',
'sponsor',
'text',
'watermark.type.1',
]
[el_GR]
ignore = [
'language.direction',
]
[es_ES]
ignore = [
'adminUserSettings.roles',
'color',
'error',
'language.direction',
'no',
'showJS.tags',
]
[eu_ES]
ignore = [
'language.direction',
]
[fr_FR]
ignore = [
'AddStampRequest.alphabet',
'AddStampRequest.position',
'AddStampRequest.rotation',
'PDFToBook.selectText.1',
'addPageNumbers.selectText.3',
'adminUserSettings.actions',
'alphabet',
'compare.document.1',
'compare.document.2',
'info',
'language.direction',
'licenses.license',
'licenses.module',
'licenses.nav',
'licenses.version',
'pdfOrganiser.mode',
'pipeline.title',
'pipelineOptions.pipelineHeader',
'sponsor',
'watermark.type.2',
]
[ga_IE]
ignore = [
'language.direction',
]
[hi_IN]
ignore = [
'language.direction',
]
[hr_HR]
ignore = [
'PDFToBook.selectText.1',
'font',
'home.pipeline.title',
'info',
'language.direction',
'pdfOrganiser.tags',
'showJS.tags',
]
[hu_HU]
ignore = [
'language.direction',
]
[id_ID]
ignore = [
'language.direction',
]
[it_IT]
ignore = [
'font',
'language.direction',
'no',
'password',
'pipeline.title',
'pipelineOptions.pipelineHeader',
'removePassword.selectText.2',
'showJS.tags',
'sponsor',
]
[ja_JP]
ignore = [
'language.direction',
]
[ko_KR]
ignore = [
'language.direction',
]
[nl_NL]
ignore = [
'HTMLToPDF.print',
'adjustContrast.contrast',
'compare.document.1',
'compare.document.2',
'error',
'getPdfInfo.downloadJson',
'help',
'info',
'language.direction',
'navbar.allTools',
'printFile.submit',
'showJS.downloadJS',
'sponsor',
]
[no_NB]
ignore = [
'PDFToBook.selectText.1',
'adminUserSettings.admin',
'info',
'language.direction',
'oops',
'sponsor',
]
[pl_PL]
ignore = [
'PDFToBook.selectText.1',
'language.direction',
]
[pt_BR]
ignore = [
'changeMetadata.trapped',
'language.direction',
'pipelineOptions.pipelineHeader',
]
[pt_PT]
ignore = [
'language.direction',
]
[ro_RO]
ignore = [
'language.direction',
]
[ru_RU]
ignore = [
'language.direction',
]
[sk_SK]
ignore = [
'adminUserSettings.admin',
'home.multiTool.title',
'info',
'language.direction',
'navbar.sections.security',
'text',
'watermark.type.1',
]
[sr_LATN_RS]
ignore = [
'language.direction',
'licenses.version',
'poweredBy',
]
[sv_SE]
ignore = [
'language.direction',
]
[th_TH]
ignore = [
'language.direction',
'pipeline.title',
'pipelineOptions.pipelineHeader',
'showJS.tags',
]
[tr_TR]
ignore = [
'language.direction',
]
[uk_UA]
ignore = [
'language.direction',
]
[vi_VN]
ignore = [
'language.direction',
'pipeline.title',
'pipelineOptions.pipelineHeader',
'showJS.tags',
]
[zh_CN]
ignore = [
'language.direction',
]
[zh_TW]
ignore = [
'language.direction',
]

View File

@@ -1,37 +1,6 @@
#!/bin/bash
#!/bin/sh
# Update the user and group IDs as per environment variables
if [ ! -z "$PUID" ] && [ "$PUID" != "$(id -u stirlingpdfuser)" ]; then
usermod -o -u "$PUID" stirlingpdfuser || true
fi
/scripts/download-security-jar.sh
if [ ! -z "$PGID" ] && [ "$PGID" != "$(getent group stirlingpdfgroup | cut -d: -f3)" ]; then
groupmod -o -g "$PGID" stirlingpdfgroup || true
fi
umask "$UMASK" || true
if [[ "$INSTALL_BOOK_AND_ADVANCED_HTML_OPS" == "true" && "$FAT_DOCKER" != "true" ]]; then
echo "issue with calibre in current version, feature currently disabled on Stirling-PDF"
#apk add --no-cache calibre@testing
fi
if [[ "$FAT_DOCKER" != "true" ]]; then
/scripts/download-security-jar.sh
fi
if [[ -n "$LANGS" ]]; then
/scripts/installFonts.sh $LANGS
fi
echo "Setting permissions and ownership for necessary directories..."
# Attempt to change ownership of directories and files
if chown -R stirlingpdfuser:stirlingpdfgroup $HOME /logs /scripts /usr/share/fonts/opentype/noto /configs /customFiles /pipeline /app.jar; then
chmod -R 755 /logs /scripts /usr/share/fonts/opentype/noto /configs /customFiles /pipeline /app.jar || true
# If chown succeeds, execute the command as stirlingpdfuser
exec su-exec stirlingpdfuser "$@"
else
# If chown fails, execute the command without changing the user context
echo "[WARN] Chown failed, running as host user"
exec "$@"
fi
# Run the main command
exec "$@"

View File

@@ -17,15 +17,14 @@ fi
if [[ -n "$TESSERACT_LANGS" ]]; then
# Convert comma-separated values to a space-separated list
LANGS=$(echo $TESSERACT_LANGS | tr ',' ' ')
pattern='^[a-zA-Z]{2,4}(_[a-zA-Z]{2,4})?$'
# Install each language pack
for LANG in $LANGS; do
if [[ $LANG =~ $pattern ]]; then
apk add --no-cache "tesseract-ocr-data-$LANG"
else
echo "Skipping invalid language code"
fi
apt-get install -y "tesseract-ocr-$LANG"
done
fi
/scripts/init-without-ocr.sh "$@"
/scripts/download-security-jar.sh
# Run the main command
exec "$@"

View File

@@ -1,67 +0,0 @@
#!/bin/bash
LANGS=$1
# Function to install a font package
install_font() {
echo "Installing font package: $1"
if ! apk add "$1" --no-cache; then
echo "Failed to install $1"
fi
}
# Install common fonts used across many languages
#common_fonts=(
# font-terminus
# font-dejavu
# font-noto
# font-noto-cjk
# font-awesome
# font-noto-extra
#)
#
#for font in "${common_fonts[@]}"; do
# install_font $font
#done
# Map languages to specific font packages
declare -A language_fonts=(
["ar_AR"]="font-noto-arabic"
["zh_CN"]="font-isas-misc"
["zh_TW"]="font-isas-misc"
["ja_JP"]="font-noto font-noto-thai font-noto-tibetan font-ipa font-sony-misc font-jis-misc"
["ru_RU"]="font-vollkorn font-misc-cyrillic font-mutt-misc font-screen-cyrillic font-winitzki-cyrillic font-cronyx-cyrillic"
["sr_LATN_RS"]="font-vollkorn font-misc-cyrillic font-mutt-misc font-screen-cyrillic font-winitzki-cyrillic font-cronyx-cyrillic"
["uk_UA"]="font-vollkorn font-misc-cyrillic font-mutt-misc font-screen-cyrillic font-winitzki-cyrillic font-cronyx-cyrillic"
["ko_KR"]="font-noto font-noto-thai font-noto-tibetan"
["el_GR"]="font-noto"
["hi_IN"]="font-noto-devanagari"
["bg_BG"]="font-vollkorn font-misc-cyrillic"
["GENERAL"]="font-terminus font-dejavu font-noto font-noto-cjk font-awesome font-noto-extra"
)
# Install fonts for other languages which generally do not need special packages beyond 'font-noto'
other_langs=("en_GB" "en_US" "de_DE" "fr_FR" "es_ES" "ca_CA" "it_IT" "pt_BR" "nl_NL" "sv_SE" "pl_PL" "ro_RO" "hu_HU" "tr_TR" "id_ID" "eu_ES")
if [[ $LANGS == "ALL" ]]; then
# Install all fonts from the language_fonts map
for fonts in "${language_fonts[@]}"; do
for font in $fonts; do
install_font $font
done
done
else
# Split comma-separated languages and install necessary fonts
IFS=',' read -ra LANG_CODES <<< "$LANGS"
for code in "${LANG_CODES[@]}"; do
if [[ " ${other_langs[@]} " =~ " ${code} " ]]; then
install_font font-noto
else
fonts_to_install=${language_fonts[$code]}
if [ ! -z "$fonts_to_install" ]; then
for font in $fonts_to_install; do
install_font $font
done
fi
fi
done
fi

View File

@@ -1,174 +0,0 @@
"""
Author: Ludy87
Description: This script converts a PDF file to WebP images. It includes functionality to resize images if they exceed specified dimensions and handle conversion of PDF pages to WebP format.
Example
-------
To convert a PDF file to WebP images with each page as a separate WebP file:
python script.py input.pdf output_directory
To convert a PDF file to a single WebP image:
python script.py input.pdf output_directory --single
To adjust the DPI resolution for rendering PDF pages:
python script.py input.pdf output_directory --dpi 150
"""
import argparse
import os
from pdf2image import convert_from_path
from PIL import Image
def resize_image(input_image_path, output_image_path, max_size=(16383, 16383)):
"""
Resize the image if its dimensions exceed the maximum allowed size and save it as WebP.
Parameters
----------
input_image_path : str
Path to the input image file.
output_image_path : str
Path where the output WebP image will be saved.
max_size : tuple of int, optional
Maximum allowed dimensions for the image (width, height). Default is (16383, 16383).
Returns
-------
None
"""
try:
# Open the image
image = Image.open(input_image_path)
width, height = image.size
max_width, max_height = max_size
# Check if the image dimensions exceed the maximum allowed dimensions
if width > max_width or height > max_height:
# Calculate the scaling ratio
ratio = min(max_width / width, max_height / height)
new_width = int(width * ratio)
new_height = int(height * ratio)
# Resize the image
resized_image = image.resize((new_width, new_height), Image.LANCZOS)
resized_image.save(output_image_path, format="WEBP", quality=100)
print(
f"The image was successfully resized to ({new_width}, {new_height}) and saved as WebP: {output_image_path}"
)
else:
# If dimensions are within the allowed limits, save the image directly
image.save(output_image_path, format="WEBP", quality=100)
print(f"The image was successfully saved as WebP: {output_image_path}")
except Exception as e:
print(f"An error occurred: {e}")
def convert_image_to_webp(input_image, output_file):
"""
Convert an image to WebP format, resizing it if it exceeds the maximum dimensions.
Parameters
----------
input_image : str
Path to the input image file.
output_file : str
Path where the output WebP image will be saved.
Returns
-------
None
"""
# Resize the image if it exceeds the maximum dimensions
resize_image(input_image, output_file, max_size=(16383, 16383))
def pdf_to_webp(pdf_path, output_dir, dpi=300):
"""
Convert each page of a PDF file to WebP images.
Parameters
----------
pdf_path : str
Path to the input PDF file.
output_dir : str
Directory where the WebP images will be saved.
dpi : int, optional
DPI resolution for rendering PDF pages. Default is 300.
Returns
-------
None
"""
# Convert the PDF to a list of images
images = convert_from_path(pdf_path, dpi=dpi)
for page_number, image in enumerate(images):
# Define temporary PNG path
temp_png_path = os.path.join(output_dir, f"temp_page_{page_number + 1}.png")
image.save(temp_png_path, format="PNG")
# Define the output path for WebP
output_path = os.path.join(output_dir, f"page_{page_number + 1}.webp")
# Convert PNG to WebP
convert_image_to_webp(temp_png_path, output_path)
# Delete the temporary PNG file
os.remove(temp_png_path)
def main(pdf_image_path, output_dir, dpi=300, single_images_flag=False):
"""
Main function to handle conversion from PDF to WebP images.
Parameters
----------
pdf_image_path : str
Path to the input PDF file or image.
output_dir : str
Directory where the WebP images will be saved.
dpi : int, optional
DPI resolution for rendering PDF pages. Default is 300.
single_images_flag : bool, optional
If True, combine all pages into a single WebP image. Default is False.
Returns
-------
None
"""
if single_images_flag:
# Combine all pages into a single WebP image
output_path = os.path.join(output_dir, "combined_image.webp")
convert_image_to_webp(pdf_image_path, output_path)
else:
# Convert each PDF page to a separate WebP image
pdf_to_webp(pdf_image_path, output_dir, dpi)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Convert a PDF file to WebP images.")
parser.add_argument("pdf_path", help="The path to the input PDF file.")
parser.add_argument(
"output_dir", help="The directory where the WebP images should be saved."
)
parser.add_argument(
"--dpi",
type=int,
default=300,
help="The DPI resolution for rendering the PDF pages (default: 300).",
)
parser.add_argument(
"--single",
action="store_true",
help="Combine all pages into a single WebP image.",
)
args = parser.parse_args()
os.makedirs(args.output_dir, exist_ok=True)
main(
args.pdf_path,
args.output_dir,
dpi=args.dpi,
single_images_flag=args.single,
)
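
The 16383-pixel cap in resize_image corresponds to WebP's maximum per-dimension size. A quick worked example of the scaling ratio the script computes (the input dimensions are made up):

# Worked example of the ratio used by resize_image above.
width, height = 20000, 10000            # hypothetical oversized page render
max_width = max_height = 16383          # WebP per-dimension limit
ratio = min(max_width / width, max_height / height)   # 0.81915
print(int(width * ratio), int(height * ratio))        # 16383 8191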

View File

@@ -2,7 +2,7 @@ import argparse
import sys
import cv2
import numpy as np
import os
import os
def find_photo_boundaries(image, background_color, tolerance=30, min_area=10000, min_contour_area=500):
mask = cv2.inRange(image, background_color - tolerance, background_color + tolerance)
@@ -49,9 +49,9 @@ def auto_rotate(image, angle_threshold=1):
angles = []
for rho, theta in lines[:, 0]:
angles.append((theta * 180) / np.pi - 90)
angle = np.median(angles)
if abs(angle) < angle_threshold:
return image
@@ -65,16 +65,16 @@ def auto_rotate(image, angle_threshold=1):
def crop_borders(image, border_color, tolerance=30):
mask = cv2.inRange(image, border_color - tolerance, border_color + tolerance)
contours, _ = cv2.findContours(mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
if len(contours) == 0:
return image
largest_contour = max(contours, key=cv2.contourArea)
x, y, w, h = cv2.boundingRect(largest_contour)
return image[y:y+h, x:x+w]
def split_photos(input_file, output_directory, tolerance=30, min_area=10000, min_contour_area=500, angle_threshold=10, border_size=0):
image = cv2.imread(input_file)
background_color = estimate_background_color(image)
@@ -110,7 +110,7 @@ if __name__ == "__main__":
parser.add_argument("--min_contour_area", type=int, default=500, help="Sets the minimum contour area threshold for a photo (default: 500).")
parser.add_argument("--angle_threshold", type=int, default=10, help="Sets the minimum absolute angle required for the image to be rotated (default: 10).")
parser.add_argument("--border_size", type=int, default=0, help="Sets the size of the border added and removed to prevent white borders in the output (default: 0).")
args = parser.parse_args()
split_photos(args.input_file, args.output_directory, tolerance=args.tolerance, min_area=args.min_area, min_contour_area=args.min_contour_area, angle_threshold=args.angle_threshold, border_size=args.border_size)
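
The deskew step in auto_rotate turns each Hough-line theta (radians) into a degrees-from-vertical skew, takes the median, and only rotates when the median exceeds the threshold. A small sketch of just that computation, with made-up line angles:

import numpy as np

# Stand-in for lines[:, 0] from cv2.HoughLines: (rho, theta) pairs, theta in radians.
lines = [(120.0, np.deg2rad(91.5)), (300.0, np.deg2rad(92.0)), (80.0, np.deg2rad(88.5))]

angles = [(theta * 180) / np.pi - 90 for _, theta in lines]   # [1.5, 2.0, -1.5]
angle = np.median(angles)                                     # 1.5
angle_threshold = 1
print(angle, abs(angle) >= angle_threshold)  # 1.5 True -> the image would be rotated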

View File

@@ -1,25 +0,0 @@
package stirling.software.SPDF.EE;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Lazy;
import stirling.software.SPDF.model.ApplicationProperties;
@Configuration
@Lazy
public class EEAppConfig {
private static final Logger logger = LoggerFactory.getLogger(EEAppConfig.class);
@Autowired ApplicationProperties applicationProperties;
@Bean(name = "RunningEE")
public boolean runningEnterpriseEdition() {
// TODO: Implement EE detection
return false;
}
}

View File

@@ -6,15 +6,11 @@ import java.net.Socket;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import io.github.pixee.security.SystemCommand;
public class LibreOfficeListener {
private static final Logger logger = LoggerFactory.getLogger(LibreOfficeListener.class);
private static final long ACTIVITY_TIMEOUT = 20L * 60 * 1000; // 20 minutes
private static final long ACTIVITY_TIMEOUT = 20 * 60 * 1000; // 20 minutes
private static final LibreOfficeListener INSTANCE = new LibreOfficeListener();
private static final int LISTENER_PORT = 2002;
@@ -31,12 +27,14 @@ public class LibreOfficeListener {
private LibreOfficeListener() {}
private boolean isListenerRunning() {
System.out.println("waiting for listener to start");
try (Socket socket = new Socket()) {
try {
System.out.println("waiting for listener to start");
Socket socket = new Socket();
socket.connect(
new InetSocketAddress("localhost", 2002), 1000); // Timeout after 1 second
socket.close();
return true;
} catch (Exception e) {
} catch (IOException e) {
return false;
}
}
@@ -65,7 +63,6 @@ public class LibreOfficeListener {
try {
Thread.sleep(5000); // Check for inactivity every 5 seconds
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
break;
}
}
@@ -83,8 +80,8 @@ public class LibreOfficeListener {
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
logger.error("exception", e);
// TODO Auto-generated catch block
e.printStackTrace();
} // Check every 1 second
}
}
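
The hunk above probes the LibreOffice listener port with a 1-second connect timeout to decide whether the listener is up. The same check, sketched in Python purely for illustration (the actual code is Java):

import socket

def is_listener_running(host: str = "localhost", port: int = 2002) -> bool:
    # Probe the listener port; the 1-second timeout mirrors the Java check.
    try:
        with socket.create_connection((host, port), timeout=1.0):
            return True
    except OSError:
        return False

print(is_listener_running())  # False unless something is listening on port 2002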

View File

@@ -1,17 +1,10 @@
package stirling.software.SPDF;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.core.env.Environment;
@@ -21,33 +14,23 @@ import io.github.pixee.security.SystemCommand;
import jakarta.annotation.PostConstruct;
import stirling.software.SPDF.config.ConfigInitializer;
import stirling.software.SPDF.model.ApplicationProperties;
import stirling.software.SPDF.utils.GeneralUtils;
@SpringBootApplication
@EnableScheduling
public class SPdfApplication {
private static final Logger logger = LoggerFactory.getLogger(SPdfApplication.class);
@Autowired private Environment env;
@Autowired ApplicationProperties applicationProperties;
private static String serverPortStatic;
@Value("${server.port:8080}")
public void setServerPortStatic(String port) {
SPdfApplication.serverPortStatic = port;
}
@PostConstruct
public void init() {
// Check if the BROWSER_OPEN environment variable is set to true
String browserOpenEnv = env.getProperty("BROWSER_OPEN");
boolean browserOpen = browserOpenEnv != null && "true".equalsIgnoreCase(browserOpenEnv);
if (browserOpen) {
try {
String url = "http://localhost:" + getNonStaticPort();
String url = "http://localhost:" + getPort();
String os = System.getProperty("os.name").toLowerCase();
Runtime rt = Runtime.getRuntime();
@@ -56,77 +39,45 @@ public class SPdfApplication {
SystemCommand.runCommand(rt, "rundll32 url.dll,FileProtocolHandler " + url);
}
} catch (Exception e) {
logger.error("Error opening browser: {}", e.getMessage());
e.printStackTrace();
}
}
logger.info("Running configs {}", applicationProperties.toString());
}
public static void main(String[] args) throws IOException, InterruptedException {
public static void main(String[] args) {
SpringApplication app = new SpringApplication(SPdfApplication.class);
app.setAdditionalProfiles("default");
app.addInitializers(new ConfigInitializer());
Map<String, String> propertyFiles = new HashMap<>();
// stirling pdf settings file
if (Files.exists(Paths.get("configs/settings.yml"))) {
propertyFiles.put("spring.config.additional-location", "file:configs/settings.yml");
} else {
logger.warn(
"External configuration file 'configs/settings.yml' does not exist. Using default configuration and environment configuration instead.");
}
// custom java settings file
if (Files.exists(Paths.get("configs/custom_settings.yml"))) {
String existingLocation =
propertyFiles.getOrDefault("spring.config.additional-location", "");
if (!existingLocation.isEmpty()) {
existingLocation += ",";
}
propertyFiles.put(
"spring.config.additional-location",
existingLocation + "file:configs/custom_settings.yml");
} else {
logger.warn("Custom configuration file 'configs/custom_settings.yml' does not exist.");
}
if (!propertyFiles.isEmpty()) {
app.setDefaultProperties(
Collections.singletonMap(
"spring.config.additional-location",
propertyFiles.get("spring.config.additional-location")));
"spring.config.additional-location", "file:configs/settings.yml"));
} else {
System.out.println(
"External configuration file 'configs/settings.yml' does not exist. Using default configuration and environment configuration instead.");
}
app.run(args);
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new RuntimeException("Thread interrupted while sleeping", e);
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
Files.createDirectories(Path.of("customFiles/static/"));
Files.createDirectories(Path.of("customFiles/templates/"));
} catch (Exception e) {
logger.error("Error creating directories: {}", e.getMessage());
GeneralUtils.createDir("customFiles/static/");
GeneralUtils.createDir("customFiles/templates/");
System.out.println("Stirling-PDF Started.");
String url = "http://localhost:" + getPort();
System.out.println("Navigate to " + url);
}
public static String getPort() {
String port = System.getProperty("local.server.port");
if (port == null || port.isEmpty()) {
port = "8080";
}
printStartupLogs();
}
private static void printStartupLogs() {
logger.info("Stirling-PDF Started.");
String url = "http://localhost:" + getStaticPort();
logger.info("Navigate to {}", url);
}
public static String getStaticPort() {
return serverPortStatic;
}
public String getNonStaticPort() {
return serverPortStatic;
return port;
}
}
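
One side of the hunk above accumulates spring.config.additional-location as a comma-separated list of whichever external config files exist (configs/settings.yml and configs/custom_settings.yml). A minimal sketch of that accumulation, in Python for illustration only:

from pathlib import Path

def additional_config_locations() -> str | None:
    # Append each existing config file to a comma-separated
    # spring.config.additional-location value; None if neither exists.
    locations = []
    for candidate in ("configs/settings.yml", "configs/custom_settings.yml"):
        if Path(candidate).exists():
            locations.append(f"file:{candidate}")
    return ",".join(locations) if locations else None

print(additional_config_locations())
# e.g. "file:configs/settings.yml,file:configs/custom_settings.yml" when both exist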

Some files were not shown because too many files have changed in this diff.