Compare commits
156 Commits
bstoeger-m
...
master
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
b126fccb1b | ||
|
|
10fc3bfd47 | ||
|
|
a8c9781205 | ||
|
|
291ed9d7e3 | ||
|
|
a39f0e2891 | ||
|
|
d9f50bb8e0 | ||
|
|
d1db85005b | ||
|
|
e2d3a12555 | ||
|
|
568aeb7bce | ||
|
|
ca5f28206b | ||
|
|
adaa52bf9b | ||
|
|
692ec9ee5c | ||
|
|
c2c5faeaad | ||
|
|
88acef7f0f | ||
|
|
32cd52869b | ||
|
|
3d96642b8d | ||
|
|
c5546fb52f | ||
|
|
f65afaf5d2 | ||
|
|
9243921cbb | ||
|
|
d27451979d | ||
|
|
e7d486982f | ||
|
|
5b941ea34e | ||
|
|
56f1e7027f | ||
|
|
64d4de4a1b | ||
|
|
e39b42df53 | ||
|
|
398cc2b639 | ||
|
|
2776a2fe48 | ||
|
|
1aa5438b2d | ||
|
|
ecc6f64d10 | ||
|
|
8c14fb971c | ||
|
|
6bdfee080d | ||
|
|
21269183bf | ||
|
|
245f8002a8 | ||
|
|
c3d807802d | ||
|
|
a66bdb1bf5 | ||
|
|
b579342639 | ||
|
|
888704e816 | ||
|
|
6880937838 | ||
|
|
d018b72dab | ||
|
|
b3d6920de4 | ||
|
|
912badadd4 | ||
|
|
1c0fe2fa1f | ||
|
|
48ef4b3a01 | ||
|
|
22082bd60a | ||
|
|
be1b80ea8a | ||
|
|
e81b42d533 | ||
|
|
dd50ab0106 | ||
|
|
0d6b572a9f | ||
|
|
21f64134b7 | ||
|
|
7bf40d659c | ||
|
|
6ae2844f24 | ||
|
|
447f9709f7 | ||
|
|
4ae6c0bbc4 | ||
|
|
6fc8310705 | ||
|
|
e20ec9248c | ||
|
|
8769b1232e | ||
|
|
d061a54e3d | ||
|
|
306dad575c | ||
|
|
331d6712c6 | ||
|
|
f4e61aa5dc | ||
|
|
528532572f | ||
|
|
a83349015a | ||
|
|
8627f6fc4a | ||
|
|
5bad522390 | ||
|
|
edb771e8e6 | ||
|
|
17d83acdab | ||
|
|
133354d51d | ||
|
|
46cf2fc086 | ||
|
|
5ac1922d84 | ||
|
|
3153a139b3 | ||
|
|
e65c7cedc8 | ||
|
|
32a08735c3 | ||
|
|
af6caa6fa2 | ||
|
|
f3c7dcf9c9 | ||
|
|
bb00a9728f | ||
|
|
a2cd621819 | ||
|
|
d92777a3ff | ||
|
|
e09a134a3f | ||
|
|
aecb4f5005 | ||
|
|
358b9186bf | ||
|
|
12ae3d4e96 | ||
|
|
34926f1325 | ||
|
|
da8509d29b | ||
|
|
9deef235e2 | ||
|
|
e7a6de3894 | ||
|
|
177a1a6706 | ||
|
|
556ecd5a9b | ||
|
|
b320942343 | ||
|
|
8867edffe8 | ||
|
|
991b72d4ff | ||
|
|
47254d91e0 | ||
|
|
9923d49624 | ||
|
|
9febfdd084 | ||
|
|
8677d1f325 | ||
|
|
6dd5ec296d | ||
|
|
4de109bbf1 | ||
|
|
d83da05f8d | ||
|
|
b6439e0420 | ||
|
|
ab7218d543 | ||
|
|
2c2ad1e5c9 | ||
|
|
4af2ec88bd | ||
|
|
c6cd10a43f | ||
|
|
8e106b0449 | ||
|
|
da7ea17b66 | ||
|
|
b097c0a44f | ||
|
|
ec0bc2d06c | ||
|
|
b89029353f | ||
|
|
14cfb17c1a | ||
|
|
157b52aa6e | ||
|
|
322c3b55e6 | ||
|
|
a123589efb | ||
|
|
ae299d5e66 | ||
|
|
422f693f5b | ||
|
|
84b7ffafd2 | ||
|
|
e65ba50c1a | ||
|
|
82ba236859 | ||
|
|
177b03958c | ||
|
|
bd3f9b107e | ||
|
|
15b17a0aca | ||
|
|
b542a39a00 | ||
|
|
9a052cd089 | ||
|
|
4db19d6284 | ||
|
|
8b51ff7ded | ||
|
|
fe68870079 | ||
|
|
f69686d429 | ||
|
|
77e8c3655e | ||
|
|
bd31efa821 | ||
|
|
80eff1d6e2 | ||
|
|
74b1648fcc | ||
|
|
7fb0e9e59e | ||
|
|
d59bd1831a | ||
|
|
f05808fded | ||
|
|
1692e48163 | ||
|
|
375b08857c | ||
|
|
d49092ac70 | ||
|
|
ee8b37cc6e | ||
|
|
77281f8bfe | ||
|
|
6b11457ddd | ||
|
|
8a20509861 | ||
|
|
334a357c19 | ||
|
|
1973d7c881 | ||
|
|
a5575f43a3 | ||
|
|
ffce4bc097 | ||
|
|
5980db073a | ||
|
|
4dcc9210a9 | ||
|
|
f4b35f67f6 | ||
|
|
0d011231e6 | ||
|
|
de313bd01a | ||
|
|
2172e18298 | ||
|
|
dd466d2d48 | ||
|
|
916c88ded9 | ||
|
|
176ee1ac9c | ||
|
|
0ea287cc2c | ||
|
|
8c861f749f | ||
|
|
f1bd5dc051 | ||
|
|
d64986415c |
56
.github/actions/manage-version/action.yml
vendored
Normal file
56
.github/actions/manage-version/action.yml
vendored
Normal file
@ -0,0 +1,56 @@
|
||||
name: Manage the Subsurface CICD versioning
|
||||
|
||||
inputs:
|
||||
no-increment:
|
||||
description: 'Only get the current version, do not increment it even for push events (Caution: not actually a boolean)'
|
||||
default: false
|
||||
nightly-builds-secret:
|
||||
description: The secret to access the nightly builds repository
|
||||
default: ''
|
||||
|
||||
outputs:
|
||||
version:
|
||||
description: The long form version number
|
||||
value: ${{ steps.version_number.outputs.version }}
|
||||
buildnr:
|
||||
description: The build number
|
||||
value: ${{ steps.version_number.outputs.buildnr }}
|
||||
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
- name: atomically create or retrieve the build number and assemble release notes for a push (i.e. merging of a pull request)
|
||||
if: github.event_name == 'push' && inputs.no-increment == 'false'
|
||||
env:
|
||||
NIGHTLY_BUILDS_SECRET: ${{ inputs.nightly-builds-secret }}
|
||||
shell: bash
|
||||
run: |
|
||||
if [ -z "$NIGHTLY_BUILDS_SECRET" ]; then
|
||||
echo "Need to supply the secret for the nightly-builds repository to increment the version number, aborting."
|
||||
exit 1
|
||||
fi
|
||||
scripts/get-atomic-buildnr.sh $GITHUB_SHA $NIGHTLY_BUILDS_SECRET "CICD-release"
|
||||
|
||||
- name: retrieve the current version number in all other cases
|
||||
if: github.event_name != 'push' || inputs.no-increment != 'false'
|
||||
env:
|
||||
PULL_REQUEST_BRANCH: ${{ github.event.pull_request.head.ref }}
|
||||
shell: bash
|
||||
run: |
|
||||
echo "pull-request-$PULL_REQUEST_BRANCH" > latest-subsurface-buildnumber-extension
|
||||
|
||||
- name: store version number for the build
|
||||
id: version_number
|
||||
env:
|
||||
PULL_REQUEST_HEAD_SHA: ${{ github.event.pull_request.head.sha }}
|
||||
shell: bash
|
||||
run: |
|
||||
git config --global --add safe.directory $GITHUB_WORKSPACE
|
||||
# For a pull request we need the information from the pull request branch
|
||||
# and not from the merge branch on the pull request
|
||||
git checkout $PULL_REQUEST_HEAD_SHA
|
||||
version=$(scripts/get-version.sh)
|
||||
echo "version=$version" >> $GITHUB_OUTPUT
|
||||
buildnr=$(scripts/get-version.sh 1)
|
||||
echo "buildnr=$buildnr" >> $GITHUB_OUTPUT
|
||||
git checkout $GITHUB_SHA
|
||||
8
.github/workflows/android-dockerimage.yml
vendored
8
.github/workflows/android-dockerimage.yml
vendored
@ -15,17 +15,17 @@ jobs:
|
||||
VERSION: ${{ '5.15.2' }} # the version numbers here is based on the Qt version, the third digit is the rev of the docker image
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v1
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Build the name for the docker image
|
||||
id: build_name
|
||||
run: |
|
||||
v=${{ env.VERSION }}
|
||||
b=${{ github.ref }} # -BRANCH suffix, unless the branch is master
|
||||
v=$VERSION
|
||||
b=$GITHUB_REF # -BRANCH suffix, unless the branch is master
|
||||
b=${b/refs\/heads\//}
|
||||
b=${b,,} # the name needs to be all lower case
|
||||
if [ $b = "master" ] ; then b="" ; else b="-$b" ; fi
|
||||
echo "NAME=subsurface/android-build${b}:${v}" >> $GITHUB_OUTPUT
|
||||
echo "NAME=$GITHUB_REPOSITORY_OWNER/android-build${b}:${v}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Build and Publish Linux Docker image to Dockerhub
|
||||
uses: elgohr/Publish-Docker-Github-Action@v5
|
||||
|
||||
63
.github/workflows/android.yml
vendored
63
.github/workflows/android.yml
vendored
@ -1,4 +1,5 @@
|
||||
name: Android
|
||||
|
||||
on:
|
||||
push:
|
||||
paths-ignore:
|
||||
@ -11,12 +12,10 @@ on:
|
||||
branches:
|
||||
- master
|
||||
|
||||
env:
|
||||
BUILD_ROOT: ${{ github.workspace }}/..
|
||||
KEYSTORE_FILE: ${{ github.workspace }}/../subsurface.keystore
|
||||
|
||||
jobs:
|
||||
buildAndroid:
|
||||
build:
|
||||
env:
|
||||
KEYSTORE_FILE: ${{ github.workspace }}/../subsurface.keystore
|
||||
runs-on: ubuntu-latest
|
||||
container:
|
||||
image: docker://subsurface/android-build:5.15.2
|
||||
@ -24,32 +23,33 @@ jobs:
|
||||
steps:
|
||||
- name: checkout sources
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
submodules: recursive
|
||||
|
||||
- name: atomically create or retrieve the build number and assemble release notes
|
||||
- name: set the version information
|
||||
id: version_number
|
||||
if: github.event_name == 'push'
|
||||
run: |
|
||||
bash scripts/get-atomic-buildnr.sh ${{ github.sha }} ${{ secrets.NIGHTLY_BUILDS }} "CICD-release"
|
||||
version=$(cat release-version)
|
||||
echo "version=$version" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: store dummy version and build number for non-push build runs
|
||||
if: github.event_name != 'push'
|
||||
run: |
|
||||
echo "100" > latest-subsurface-buildnumber
|
||||
echo "CICD-pull-request" > latest-subsurface-buildnumber-extension
|
||||
uses: ./.github/actions/manage-version
|
||||
with:
|
||||
nightly-builds-secret: ${{ secrets.NIGHTLY_BUILDS }}
|
||||
|
||||
- name: set up the keystore
|
||||
if: github.event_name == 'push'
|
||||
env:
|
||||
ANDROID_KEYSTORE_BASE64: ${{ secrets.ANDROID_KEYSTORE_BASE64 }}
|
||||
run: |
|
||||
echo "${{ secrets.ANDROID_KEYSTORE_BASE64 }}" | base64 -d > $KEYSTORE_FILE
|
||||
echo "$ANDROID_KEYSTORE_BASE64" | base64 -d > $KEYSTORE_FILE
|
||||
|
||||
- name: run build
|
||||
id: build
|
||||
env:
|
||||
KEYSTORE_PASSWORD: pass:${{ secrets.ANDROID_KEYSTORE_PASSWORD }}
|
||||
KEYSTORE_ALIAS: ${{ secrets.ANDROID_KEYSTORE_ALIAS }}
|
||||
BUILDNR: ${{ steps.version_number.outputs.buildnr }}
|
||||
run: |
|
||||
# this is rather awkward, but it allows us to use the preinstalled
|
||||
# Android and Qt versions with relative paths
|
||||
cd $BUILD_ROOT
|
||||
cd ..
|
||||
ln -s /android/5.15.* .
|
||||
ln -s /android/build-tools .
|
||||
ln -s /android/cmdline-tools .
|
||||
@ -62,17 +62,25 @@ jobs:
|
||||
git config --global --add safe.directory $GITHUB_WORKSPACE
|
||||
git config --global --add safe.directory $GITHUB_WORKSPACE/libdivecomputer
|
||||
# get the build number via curl so this works both for a pull request as well as a push
|
||||
BUILDNR=$(curl -q https://raw.githubusercontent.com/subsurface/nightly-builds/main/latest-subsurface-buildnumber)
|
||||
export OUTPUT_DIR="$GITHUB_WORKSPACE"
|
||||
export KEYSTORE_FILE="$KEYSTORE_FILE"
|
||||
export KEYSTORE_PASSWORD="pass:${{ secrets.ANDROID_KEYSTORE_PASSWORD }}"
|
||||
export KEYSTORE_ALIAS="${{ secrets.ANDROID_KEYSTORE_ALIAS }}"
|
||||
bash -x ./subsurface/packaging/android/qmake-build.sh -buildnr ${BUILDNR}
|
||||
bash -x ./subsurface/packaging/android/qmake-build.sh -buildnr $BUILDNR
|
||||
|
||||
- name: delete the keystore
|
||||
if: github.event_name == 'push'
|
||||
run: |
|
||||
rm $KEYSTORE_FILE
|
||||
|
||||
- name: publish pull request artifacts
|
||||
if: github.event_name == 'pull_request'
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: Subsurface-Android-${{ steps.version_number.outputs.version }}
|
||||
path: Subsurface-mobile-*.apk
|
||||
|
||||
# only publish a 'release' on push events (those include merging a PR)
|
||||
- name: upload binaries
|
||||
if: github.event_name == 'push'
|
||||
uses: softprops/action-gh-release@v1
|
||||
uses: softprops/action-gh-release@v2
|
||||
with:
|
||||
tag_name: v${{ steps.version_number.outputs.version }}
|
||||
repository: ${{ github.repository_owner }}/nightly-builds
|
||||
@ -81,8 +89,3 @@ jobs:
|
||||
fail_on_unmatched_files: true
|
||||
files: |
|
||||
Subsurface-mobile-${{ steps.version_number.outputs.version }}.apk
|
||||
|
||||
- name: delete the keystore
|
||||
if: github.event_name == 'push'
|
||||
run: |
|
||||
rm $KEYSTORE_FILE
|
||||
|
||||
24
.github/workflows/artifact-links.yml
vendored
Normal file
24
.github/workflows/artifact-links.yml
vendored
Normal file
@ -0,0 +1,24 @@
|
||||
name: Add artifact links to pull request
|
||||
|
||||
on:
|
||||
workflow_run:
|
||||
workflows: ["Ubuntu 16.04 / Qt 5.15-- for AppImage", "Mac", "Windows", "Android", "iOS"]
|
||||
types: [completed]
|
||||
|
||||
jobs:
|
||||
artifacts-url-comments:
|
||||
name: Add artifact links to PR and issues
|
||||
runs-on: ubuntu-22.04
|
||||
|
||||
steps:
|
||||
- name: Add artifact links to PR and issues
|
||||
if: github.event.workflow_run.event == 'pull_request'
|
||||
uses: tonyhallett/artifacts-url-comments@v1.1.0
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
prefix: "**Artifacts:**"
|
||||
suffix: "_**WARNING:** Use at your own risk._"
|
||||
format: name
|
||||
addTo: pull
|
||||
errorNoArtifacts: false
|
||||
19
.github/workflows/codeql-analysis.yml
vendored
19
.github/workflows/codeql-analysis.yml
vendored
@ -25,20 +25,19 @@ jobs:
|
||||
matrix:
|
||||
# Override automatic language detection by changing the below list
|
||||
# Supported options are ['csharp', 'cpp', 'go', 'java', 'javascript', 'python']
|
||||
language: ['cpp', 'javascript']
|
||||
language: ['c-cpp', 'javascript-typescript']
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
# We must fetch at least the immediate parents so that if this is
|
||||
# a pull request then we can checkout the head.
|
||||
fetch-depth: 2
|
||||
fetch-depth: 0
|
||||
submodules: recursive
|
||||
|
||||
- name: get container ready for build
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y -q --force-yes \
|
||||
sudo apt-get install -y -q \
|
||||
autoconf automake cmake g++ git libcrypto++-dev libcurl4-gnutls-dev \
|
||||
libgit2-dev libqt5qml5 libqt5quick5 libqt5svg5-dev \
|
||||
libqt5webkit5-dev libsqlite3-dev libssh2-1-dev libssl-dev libssl-dev \
|
||||
@ -51,7 +50,7 @@ jobs:
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v2
|
||||
uses: github/codeql-action/init@v3
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||
@ -60,13 +59,11 @@ jobs:
|
||||
# queries: ./path/to/local/query, your-org/your-repo/queries@main
|
||||
|
||||
- name: Build
|
||||
env:
|
||||
SUBSURFACE_REPO_PATH: ${{ github.workspace }}
|
||||
run: |
|
||||
cd ..
|
||||
git config --global --add safe.directory ${SUBSURFACE_REPO_PATH}
|
||||
git config --global --add safe.directory ${SUBSURFACE_REPO_PATH}/libdivecomputer
|
||||
git config --global --add safe.directory $GITHUB_WORKSPACE
|
||||
git config --global --add safe.directory $GITHUB_WORKSPACE/libdivecomputer
|
||||
bash -e -x subsurface/scripts/build.sh -desktop -build-with-webkit
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v2
|
||||
uses: github/codeql-action/analyze@v3
|
||||
|
||||
28
.github/workflows/coverity-scan.yml
vendored
28
.github/workflows/coverity-scan.yml
vendored
@ -1,4 +1,5 @@
|
||||
name: Coverity Scan Linux Qt 5.9
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '0 18 * * *' # Daily at 18:00 UTC
|
||||
@ -10,14 +11,11 @@ jobs:
|
||||
image: ubuntu:22.04
|
||||
|
||||
steps:
|
||||
- name: checkout sources
|
||||
uses: actions/checkout@v1
|
||||
|
||||
- name: add build dependencies
|
||||
run: |
|
||||
apt-get update
|
||||
apt-get upgrade -y
|
||||
DEBIAN_FRONTEND=noninteractive apt-get install -y -q --force-yes \
|
||||
apt-get dist-upgrade -y
|
||||
DEBIAN_FRONTEND=noninteractive apt-get install -y -q \
|
||||
wget curl \
|
||||
autoconf automake cmake g++ git libcrypto++-dev libcurl4-gnutls-dev \
|
||||
libgit2-dev libqt5qml5 libqt5quick5 libqt5svg5-dev \
|
||||
@ -29,12 +27,22 @@ jobs:
|
||||
qtpositioning5-dev qtscript5-dev qttools5-dev qttools5-dev-tools \
|
||||
qtquickcontrols2-5-dev libbluetooth-dev libmtp-dev
|
||||
|
||||
- name: checkout sources
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
submodules: recursive
|
||||
|
||||
- name: configure environment
|
||||
env:
|
||||
SUBSURFACE_REPO_PATH: ${{ github.workspace }}
|
||||
run: |
|
||||
git config --global --add safe.directory ${SUBSURFACE_REPO_PATH}
|
||||
git config --global --add safe.directory ${SUBSURFACE_REPO_PATH}/libdivecomputer
|
||||
git config --global --add safe.directory $GITHUB_WORKSPACE
|
||||
git config --global --add safe.directory $GITHUB_WORKSPACE/libdivecomputer
|
||||
|
||||
- name: get the version information
|
||||
id: version_number
|
||||
uses: ./.github/actions/manage-version
|
||||
with:
|
||||
no-increment: true
|
||||
|
||||
- name: run coverity scan
|
||||
uses: vapier/coverity-scan-action@v1
|
||||
@ -44,5 +52,5 @@ jobs:
|
||||
email: glance@acc.umu.se
|
||||
command: subsurface/scripts/build.sh -desktop -build-with-webkit
|
||||
working-directory: ${{ github.workspace }}/..
|
||||
version: $(/scripts/get-version)
|
||||
version: ${{ steps.version_number.outputs.version }}
|
||||
description: Automatic scan on github actions
|
||||
|
||||
3
.github/workflows/documentation.yml
vendored
3
.github/workflows/documentation.yml
vendored
@ -26,6 +26,9 @@ jobs:
|
||||
|
||||
- name: Checkout Sources
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
submodules: recursive
|
||||
|
||||
- name: Process the Documentation
|
||||
id: process_documentation
|
||||
|
||||
24
.github/workflows/fedora-copr-build.yml
vendored
24
.github/workflows/fedora-copr-build.yml
vendored
@ -11,30 +11,32 @@ jobs:
|
||||
setup-build:
|
||||
name: Submit build to Fedora COPR
|
||||
# this seems backwards, but we want to run under Fedora, but Github doesn' support that
|
||||
container: fedora:latest
|
||||
runs-on: ubuntu-latest
|
||||
container:
|
||||
image: fedora:latest
|
||||
|
||||
steps:
|
||||
- name: Check out sources
|
||||
uses: actions/checkout@v1
|
||||
|
||||
- name: Setup build dependencies in the Fedora container
|
||||
run: |
|
||||
dnf -y install @development-tools @rpm-development-tools
|
||||
dnf -y install copr-cli make
|
||||
|
||||
- name: Check out sources
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
submodules: recursive
|
||||
|
||||
- name: setup git
|
||||
run: |
|
||||
git config --global --add safe.directory /__w/subsurface/subsurface
|
||||
git config --global --add safe.directory /__w/subsurface/subsurface/libdivecomputer
|
||||
|
||||
- name: atomically create or retrieve the build number
|
||||
- name: set the version information
|
||||
id: version_number
|
||||
if: github.event_name == 'push'
|
||||
run: |
|
||||
bash scripts/get-atomic-buildnr.sh ${{ github.sha }} ${{ secrets.NIGHTLY_BUILDS }} "CICD-release"
|
||||
version=$(cat release-version)
|
||||
echo "version=$version" >> $GITHUB_OUTPUT
|
||||
uses: ./.github/actions/manage-version
|
||||
with:
|
||||
nightly-builds-secret: ${{ secrets.NIGHTLY_BUILDS }}
|
||||
|
||||
- name: Setup API token for copr-cli
|
||||
env:
|
||||
@ -53,5 +55,5 @@ jobs:
|
||||
- name: run the copr build script
|
||||
run: |
|
||||
cd ..
|
||||
bash -x subsurface/packaging/copr/make-package.sh ${{ github.ref_name }}
|
||||
bash -x subsurface/packaging/copr/make-package.sh $GITHUB_REF_NAME
|
||||
|
||||
|
||||
47
.github/workflows/ios.yml
vendored
47
.github/workflows/ios.yml
vendored
@ -1,4 +1,5 @@
|
||||
name: iOS
|
||||
|
||||
on:
|
||||
push:
|
||||
paths-ignore:
|
||||
@ -12,37 +13,49 @@ on:
|
||||
- master
|
||||
|
||||
jobs:
|
||||
iOSBuild:
|
||||
build:
|
||||
runs-on: macOS-11
|
||||
steps:
|
||||
- name: switch to Xcode 11
|
||||
run: sudo xcode-select -s "/Applications/Xcode_11.7.app"
|
||||
|
||||
- name: checkout sources
|
||||
uses: actions/checkout@v1
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
submodules: recursive
|
||||
|
||||
- name: setup Homebrew
|
||||
run: brew install autoconf automake libtool pkg-config
|
||||
|
||||
- name: set our Qt build
|
||||
run: |
|
||||
env
|
||||
curl -L --output Qt-5.14.1-ios.tar.xz https://f002.backblazeb2.com/file/Subsurface-Travis/Qt-5.14.1-ios.tar.xz
|
||||
mkdir -p $HOME/Qt
|
||||
xzcat Qt-5.14.1-ios.tar.xz | tar -x -C $HOME/Qt -f -
|
||||
- name: checkout Qt resources
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
repository: subsurface/qt-ios
|
||||
ref: main
|
||||
path: qt-ios
|
||||
|
||||
- name: store dummy version and build number for test build
|
||||
run: |
|
||||
echo "100" > latest-subsurface-buildnumber
|
||||
echo "CICD-test-build" > latest-subsurface-buildnumber-extension
|
||||
- name: set the version information
|
||||
id: version_number
|
||||
uses: ./.github/actions/manage-version
|
||||
with:
|
||||
nightly-builds-secret: ${{ secrets.NIGHTLY_BUILDS }}
|
||||
|
||||
- name: build Subsurface-mobile for iOS
|
||||
env:
|
||||
SUBSURFACE_REPO_PATH: ${{ github.workspace }}
|
||||
VERSION: ${{ steps.version_number.outputs.version }}
|
||||
run: |
|
||||
cd ${SUBSURFACE_REPO_PATH}/..
|
||||
git config --global --add safe.directory ${SUBSURFACE_REPO_PATH}
|
||||
git config --global --add safe.directory ${SUBSURFACE_REPO_PATH}/libdivecomputer
|
||||
ln -s $HOME/Qt Qt
|
||||
cd ..
|
||||
git config --global --add safe.directory $GITHUB_WORKSPACE
|
||||
git config --global --add safe.directory $GITHUB_WORKSPACE/libdivecomputer
|
||||
export IOS_QT=$GITHUB_WORKSPACE/qt-ios
|
||||
echo "build for simulator"
|
||||
bash -x $GITHUB_WORKSPACE/packaging/ios/build.sh -simulator
|
||||
# We need this in order to be able to access the file and publish it
|
||||
mv build-Subsurface-mobile-Qt_5_14_1_for_iOS-Release/Release-iphonesimulator/Subsurface-mobile.app $GITHUB_WORKSPACE/Subsurface-mobile-$VERSION.app
|
||||
|
||||
- name: publish artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: Subsurface-iOS-${{ steps.version_number.outputs.version }}
|
||||
path: Subsurface-mobile-*.app
|
||||
|
||||
55
.github/workflows/linux-bionic-5.9.yml.disabled
vendored
55
.github/workflows/linux-bionic-5.9.yml.disabled
vendored
@ -1,55 +0,0 @@
|
||||
name: Ubuntu 18.04 / Qt 5.9--
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
pull_request:
|
||||
branches:
|
||||
- master
|
||||
|
||||
jobs:
|
||||
buildOnBionic:
|
||||
runs-on: ubuntu-18.04
|
||||
container:
|
||||
image: ubuntu:18.04 # yes, this looks redundant, but something is messed up with their Ubuntu image that causes our builds to fail
|
||||
|
||||
steps:
|
||||
- name: checkout sources
|
||||
uses: actions/checkout@v1
|
||||
|
||||
- name: add build dependencies
|
||||
run: |
|
||||
apt update
|
||||
apt install -y \
|
||||
autoconf automake cmake g++ git libcrypto++-dev libcurl4-gnutls-dev \
|
||||
libgit2-dev libqt5qml5 libqt5quick5 libqt5svg5-dev \
|
||||
libqt5webkit5-dev libsqlite3-dev libssh2-1-dev libssl-dev libssl-dev \
|
||||
libtool libusb-1.0-0-dev libxml2-dev libxslt1-dev libzip-dev make \
|
||||
pkg-config qml-module-qtlocation qml-module-qtpositioning \
|
||||
qml-module-qtquick2 qt5-default qt5-qmake qtchooser qtconnectivity5-dev \
|
||||
qtdeclarative5-dev qtdeclarative5-private-dev qtlocation5-dev \
|
||||
qtpositioning5-dev qtscript5-dev qttools5-dev qttools5-dev-tools \
|
||||
qtquickcontrols2-5-dev xvfb libbluetooth-dev libmtp-dev
|
||||
|
||||
- name: store dummy version and build number for pull request
|
||||
if: github.event_name == 'pull_request'
|
||||
run: |
|
||||
echo "6.0.100" > latest-subsurface-buildnumber
|
||||
|
||||
- name: build Subsurface
|
||||
env:
|
||||
SUBSURFACE_REPO_PATH: ${{ github.workspace }}
|
||||
run: |
|
||||
cd ..
|
||||
git config --global --add safe.directory ${SUBSURFACE_REPO_PATH}
|
||||
git config --global --add safe.directory ${SUBSURFACE_REPO_PATH}/libdivecomputer
|
||||
bash -x subsurface/scripts/build.sh -desktop -build-with-webkit
|
||||
|
||||
- name: test desktop build
|
||||
run: |
|
||||
# and now run the tests - with Qt 5.9 we can only run the desktop flavor
|
||||
echo "------------------------------------"
|
||||
echo "run tests"
|
||||
cd build/tests
|
||||
# xvfb-run --auto-servernum ./TestGitStorage -v2
|
||||
xvfb-run --auto-servernum make check
|
||||
@ -1,36 +1,27 @@
|
||||
name: Ubuntu 22.04 / Qt 5.15--
|
||||
name: Generic workflow for Debian and derivatives
|
||||
|
||||
on:
|
||||
push:
|
||||
paths-ignore:
|
||||
- scripts/docker/**
|
||||
branches:
|
||||
- master
|
||||
pull_request:
|
||||
paths-ignore:
|
||||
- scripts/docker/**
|
||||
branches:
|
||||
- master
|
||||
workflow_call:
|
||||
inputs:
|
||||
container-image:
|
||||
required: true
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
buildUbuntuJammy:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
container:
|
||||
image: ubuntu:22.04
|
||||
image: ${{ inputs.container-image }}
|
||||
|
||||
steps:
|
||||
- name: checkout sources
|
||||
uses: actions/checkout@v1
|
||||
|
||||
- name: get container ready for build
|
||||
env:
|
||||
SUBSURFACE_REPO_PATH: ${{ github.workspace }}
|
||||
run: |
|
||||
echo "--------------------------------------------------------------"
|
||||
echo "update distro and install dependencies"
|
||||
|
||||
apt-get update
|
||||
apt-get upgrade -y
|
||||
DEBIAN_FRONTEND=noninteractive apt-get install -y -q --force-yes \
|
||||
apt-get dist-upgrade -y
|
||||
DEBIAN_FRONTEND=noninteractive apt-get install -y -q \
|
||||
autoconf automake cmake g++ git libcrypto++-dev libcurl4-gnutls-dev \
|
||||
libgit2-dev libqt5qml5 libqt5quick5 libqt5svg5-dev \
|
||||
libqt5webkit5-dev libsqlite3-dev libssh2-1-dev libssl-dev libssl-dev \
|
||||
@ -44,13 +35,20 @@ jobs:
|
||||
|
||||
git config --global user.email "ci@subsurface-divelog.org"
|
||||
git config --global user.name "Subsurface CI"
|
||||
git config --global --add safe.directory ${SUBSURFACE_REPO_PATH}
|
||||
git config --global --add safe.directory ${SUBSURFACE_REPO_PATH}/libdivecomputer
|
||||
git config --global --add safe.directory $GITHUB_WORKSPACE
|
||||
git config --global --add safe.directory $GITHUB_WORKSPACE/libdivecomputer
|
||||
# needs git from the previous step
|
||||
- name: checkout sources
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
submodules: recursive
|
||||
|
||||
- name: store dummy version and build number for test build
|
||||
run: |
|
||||
echo "100" > latest-subsurface-buildnumber
|
||||
echo "CICD-test-build" > latest-subsurface-buildnumber-extension
|
||||
- name: set the version information
|
||||
id: version_number
|
||||
uses: ./.github/actions/manage-version
|
||||
with:
|
||||
no-increment: true
|
||||
|
||||
- name: build subsurface-mobile
|
||||
run: |
|
||||
19
.github/workflows/linux-debian-trixie-5.15.yml
vendored
Normal file
19
.github/workflows/linux-debian-trixie-5.15.yml
vendored
Normal file
@ -0,0 +1,19 @@
|
||||
name: Debian trixie / Qt 5.15--
|
||||
|
||||
on:
|
||||
push:
|
||||
paths-ignore:
|
||||
- scripts/docker/**
|
||||
branches:
|
||||
- master
|
||||
pull_request:
|
||||
paths-ignore:
|
||||
- scripts/docker/**
|
||||
branches:
|
||||
- master
|
||||
|
||||
jobs:
|
||||
do-build-test:
|
||||
uses: ./.github/workflows/linux-debian-generic.yml
|
||||
with:
|
||||
container-image: debian:trixie
|
||||
39
.github/workflows/linux-dockerimage.disabled
vendored
39
.github/workflows/linux-dockerimage.disabled
vendored
@ -1,39 +0,0 @@
|
||||
name: Linux Qt 5.12 Docker Image CI
|
||||
|
||||
#on:
|
||||
# push:
|
||||
# paths:
|
||||
# - scripts/docker/trusty-qt512/Dockerfile
|
||||
# - .github/workflows/linux-docker*
|
||||
|
||||
jobs:
|
||||
trusty-qt512:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
VERSION: ${{ '1.0' }} # 'official' images should have a dot-zero version
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v1
|
||||
|
||||
- name: Get our pre-reqs
|
||||
run: |
|
||||
cd scripts/docker/trusty-qt512
|
||||
bash getpackages.sh
|
||||
|
||||
- name: set env
|
||||
run: |
|
||||
v=${{ env.VERSION }}
|
||||
b=${{ github.ref }} # -BRANCH suffix, unless the branch is master
|
||||
b=${b/refs\/heads\//}
|
||||
b=${b,,} # the name needs to be all lower case
|
||||
if [ $b = "master" ] ; then b="" ; else b="-$b" ; fi
|
||||
echo "::set-env name=NAME::subsurface/trusty-qt512${b}:${v}"
|
||||
|
||||
- name: Build and Publish Linux Docker image to Dockerhub
|
||||
uses: elgohr/Publish-Docker-Github-Action@master
|
||||
with:
|
||||
name: ${{ env.NAME }}
|
||||
username: ${{ secrets.DOCKER_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||
dockerfile: 'Dockerfile'
|
||||
workdir: './scripts/docker/trusty-qt512/'
|
||||
@ -1,4 +1,5 @@
|
||||
name: Fedora 35 / Qt 6--
|
||||
|
||||
on:
|
||||
push:
|
||||
paths-ignore:
|
||||
@ -12,15 +13,12 @@ on:
|
||||
- master
|
||||
|
||||
jobs:
|
||||
buildFedoraQt6:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
container:
|
||||
image: fedora:35
|
||||
|
||||
steps:
|
||||
- name: checkout sources
|
||||
uses: actions/checkout@v1
|
||||
|
||||
- name: get container ready for build
|
||||
run: |
|
||||
echo "--------------------------------------------------------------"
|
||||
@ -37,22 +35,27 @@ jobs:
|
||||
bluez-libs-devel libgit2-devel libzip-devel libmtp-devel \
|
||||
xorg-x11-server-Xvfb
|
||||
|
||||
- name: store dummy version and build number for test build
|
||||
run: |
|
||||
echo "100" > latest-subsurface-buildnumber
|
||||
echo "CICD-test-build" > latest-subsurface-buildnumber-extension
|
||||
- name: checkout sources
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
submodules: recursive
|
||||
|
||||
- name: set the version information
|
||||
id: version_number
|
||||
uses: ./.github/actions/manage-version
|
||||
with:
|
||||
no-increment: true
|
||||
|
||||
- name: build Subsurface
|
||||
env:
|
||||
SUBSURFACE_REPO_PATH: ${{ github.workspace }}
|
||||
run: |
|
||||
echo "--------------------------------------------------------------"
|
||||
echo "building desktop"
|
||||
|
||||
# now build for the desktop version (without WebKit)
|
||||
cd ..
|
||||
git config --global --add safe.directory ${SUBSURFACE_REPO_PATH}
|
||||
git config --global --add safe.directory ${SUBSURFACE_REPO_PATH}/libdivecomputer
|
||||
git config --global --add safe.directory $GITHUB_WORKSPACE
|
||||
git config --global --add safe.directory $GITHUB_WORKSPACE/libdivecomputer
|
||||
git config --global --get-all safe.directory
|
||||
bash -e -x subsurface/scripts/build.sh -desktop -build-with-qt6
|
||||
|
||||
85
.github/workflows/linux-focal-5.12.yml
vendored
85
.github/workflows/linux-focal-5.12.yml
vendored
@ -1,85 +0,0 @@
|
||||
name: Ubuntu 20.04 / Qt 5.12--
|
||||
on:
|
||||
push:
|
||||
paths-ignore:
|
||||
- scripts/docker/**
|
||||
branches:
|
||||
- master
|
||||
pull_request:
|
||||
paths-ignore:
|
||||
- scripts/docker/**
|
||||
branches:
|
||||
- master
|
||||
|
||||
jobs:
|
||||
buildUbuntuFocal:
|
||||
runs-on: ubuntu-latest
|
||||
container:
|
||||
image: ubuntu:20.04
|
||||
|
||||
steps:
|
||||
- name: checkout sources
|
||||
uses: actions/checkout@v1
|
||||
|
||||
- name: get container ready for build
|
||||
run: |
|
||||
echo "--------------------------------------------------------------"
|
||||
echo "update distro and install dependencies"
|
||||
|
||||
apt-get update
|
||||
apt-get upgrade -y
|
||||
DEBIAN_FRONTEND=noninteractive apt-get install -y -q --force-yes \
|
||||
autoconf automake cmake g++ git libcrypto++-dev libcurl4-gnutls-dev \
|
||||
libgit2-dev libqt5qml5 libqt5quick5 libqt5svg5-dev \
|
||||
libqt5webkit5-dev libsqlite3-dev libssh2-1-dev libssl-dev libssl-dev \
|
||||
libtool libusb-1.0-0-dev libxml2-dev libxslt1-dev libzip-dev make \
|
||||
pkg-config qml-module-qtlocation qml-module-qtpositioning \
|
||||
qml-module-qtquick2 qt5-qmake qtchooser qtconnectivity5-dev \
|
||||
qtdeclarative5-dev qtdeclarative5-private-dev qtlocation5-dev \
|
||||
qtpositioning5-dev qtscript5-dev qttools5-dev qttools5-dev-tools \
|
||||
qtquickcontrols2-5-dev xvfb libbluetooth-dev libmtp-dev
|
||||
|
||||
- name: store dummy version and build number for test build
|
||||
run: |
|
||||
echo "100" > latest-subsurface-buildnumber
|
||||
echo "CICD-test-build" > latest-subsurface-buildnumber-extension
|
||||
|
||||
- name: build Subsurface-mobile
|
||||
env:
|
||||
SUBSURFACE_REPO_PATH: ${{ github.workspace }}
|
||||
run: |
|
||||
echo "--------------------------------------------------------------"
|
||||
echo "building mobile"
|
||||
git config --global user.email "ci@subsurface-divelog.org"
|
||||
git config --global user.name "Subsurface CI"
|
||||
git config --global --add safe.directory ${SUBSURFACE_REPO_PATH}
|
||||
git config --global --add safe.directory ${SUBSURFACE_REPO_PATH}/libdivecomputer
|
||||
cd ..
|
||||
bash -e -x subsurface/scripts/build.sh -mobile
|
||||
|
||||
- name: test mobile build
|
||||
run: |
|
||||
echo "--------------------------------------------------------------"
|
||||
echo "running tests for mobile"
|
||||
|
||||
cd build-mobile/tests
|
||||
# xvfb-run --auto-servernum ./TestGitStorage -v2
|
||||
xvfb-run --auto-servernum make check
|
||||
|
||||
- name: build Subsurface
|
||||
run: |
|
||||
echo "--------------------------------------------------------------"
|
||||
echo "building desktop"
|
||||
|
||||
# now build for the desktop version (including WebKit)
|
||||
cd ..
|
||||
bash -e -x subsurface/scripts/build.sh -desktop -build-with-webkit
|
||||
|
||||
- name: test desktop build
|
||||
run: |
|
||||
echo "--------------------------------------------------------------"
|
||||
echo "running tests for desktop"
|
||||
cd build/tests
|
||||
# xvfb-run --auto-servernum ./TestGitStorage -v2
|
||||
xvfb-run --auto-servernum make check
|
||||
|
||||
18
.github/workflows/linux-snap.yml
vendored
18
.github/workflows/linux-snap.yml
vendored
@ -19,16 +19,16 @@ jobs:
|
||||
timeout-minutes: 60
|
||||
steps:
|
||||
- name: Check out code
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
# Needed for version determination to work
|
||||
fetch-depth: 0
|
||||
submodules: recursive
|
||||
|
||||
- name: atomically create or retrieve the build number
|
||||
- name: set the version information
|
||||
id: version_number
|
||||
if: github.event_name == 'push'
|
||||
run: |
|
||||
bash scripts/get-atomic-buildnr.sh ${{ github.sha }} ${{ secrets.NIGHTLY_BUILDS }} "CICD-release"
|
||||
uses: ./.github/actions/manage-version
|
||||
with:
|
||||
nightly-builds-secret: ${{ secrets.NIGHTLY_BUILDS }}
|
||||
|
||||
- name: store dummy version and build number for pull request
|
||||
if: github.event_name == 'pull_request'
|
||||
@ -48,11 +48,11 @@ jobs:
|
||||
/snap/bin/lxc profile device add default ccache disk source=${HOME}/.ccache/ path=/root/.ccache
|
||||
|
||||
# Patch snapcraft.yaml to enable ccache
|
||||
patch -p1 < .github/workflows/linux-snap.patch
|
||||
patch -p1 < .github/workflows/scripts/linux-snap.patch
|
||||
|
||||
# Find common base between master and HEAD to use as cache key.
|
||||
git -c protocol.version=2 fetch --no-tags --prune --progress --no-recurse-submodules origin master
|
||||
echo "key=$( git merge-base origin/master ${{ github.sha }} )" >> $GITHUB_OUTPUT
|
||||
echo "key=$( git merge-base origin/master $GITHUB_SHA )" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: CCache
|
||||
uses: actions/cache@v3
|
||||
@ -73,7 +73,7 @@ jobs:
|
||||
|
||||
- name: Upload the snap
|
||||
if: github.event_name == 'push'
|
||||
uses: actions/upload-artifact@v2
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ steps.build-snap.outputs.snap-name }}
|
||||
path: ${{ steps.build-snap.outputs.snap-path }}
|
||||
|
||||
77
.github/workflows/linux-trusty-5.12.yml
vendored
77
.github/workflows/linux-trusty-5.12.yml
vendored
@ -1,77 +0,0 @@
|
||||
name: Ubuntu 14.04 / Qt 5.12 for AppImage--
|
||||
on:
|
||||
push:
|
||||
paths-ignore:
|
||||
- scripts/docker/**
|
||||
branches:
|
||||
- master
|
||||
pull_request:
|
||||
paths-ignore:
|
||||
- scripts/docker/**
|
||||
branches:
|
||||
- master
|
||||
|
||||
jobs:
|
||||
buildAppImage:
|
||||
runs-on: ubuntu-latest
|
||||
container:
|
||||
image: docker://subsurface/trusty-qt512:1.1
|
||||
|
||||
steps:
|
||||
- name: checkout sources
|
||||
uses: actions/checkout@v1
|
||||
|
||||
- name: atomically create or retrieve the build number and assemble release notes
|
||||
id: version_number
|
||||
if: github.event_name == 'push'
|
||||
run: |
|
||||
bash ./scripts/get-atomic-buildnr.sh ${{ github.sha }} ${{ secrets.NIGHTLY_BUILDS }} "CICD-release"
|
||||
version=$(cat release-version)
|
||||
echo "version=$version" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: store dummy version and build number for pull request
|
||||
if: github.event_name == 'pull_request'
|
||||
run: |
|
||||
echo "100" > latest-subsurface-buildnumber
|
||||
echo "CICD-pull-request" > latest-subsurface-buildnumber-extension
|
||||
|
||||
- name: run build
|
||||
env:
|
||||
SUBSURFACE_REPO_PATH: ${{ github.workspace }}
|
||||
run: |
|
||||
cd ..
|
||||
git config --global --add safe.directory ${SUBSURFACE_REPO_PATH}
|
||||
git config --global --add safe.directory ${SUBSURFACE_REPO_PATH}/libdivecomputer
|
||||
rm -rf /install-root/include/libdivecomputer
|
||||
bash -x subsurface/.github/workflows/scripts/linux-in-container-build.sh
|
||||
|
||||
- name: prepare PR artifacts
|
||||
if: github.event_name == 'pull_request'
|
||||
run: |
|
||||
mkdir -p Linux-artifacts
|
||||
mv Subsurface.AppImage Linux-artifacts
|
||||
|
||||
- name: PR artifacts
|
||||
if: github.event_name == 'pull_request'
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: Linux-artifacts
|
||||
path: Linux-artifacts
|
||||
|
||||
- name: prepare release artifacts
|
||||
if: github.event_name == 'push'
|
||||
run: |
|
||||
mv Subsurface.AppImage Subsurface-v${{ steps.version_number.outputs.version }}.AppImage
|
||||
|
||||
# only publish a 'release' on push events (those include merging a PR)
|
||||
- name: upload binaries
|
||||
if: github.event_name == 'push'
|
||||
uses: softprops/action-gh-release@v1
|
||||
with:
|
||||
tag_name: v${{ steps.version_number.outputs.version }}
|
||||
repository: ${{ github.repository_owner }}/nightly-builds
|
||||
token: ${{ secrets.NIGHTLY_BUILDS }}
|
||||
prerelease: false
|
||||
fail_on_unmatched_files: true
|
||||
files: |
|
||||
./Subsurface*.AppImage
|
||||
149
.github/workflows/linux-ubuntu-16.04-5.12-appimage.yml
vendored
Normal file
149
.github/workflows/linux-ubuntu-16.04-5.12-appimage.yml
vendored
Normal file
@ -0,0 +1,149 @@
|
||||
name: Ubuntu 16.04 / Qt 5.15-- for AppImage
|
||||
|
||||
on:
|
||||
push:
|
||||
paths-ignore:
|
||||
- scripts/docker/**
|
||||
branches:
|
||||
- master
|
||||
pull_request:
|
||||
paths-ignore:
|
||||
- scripts/docker/**
|
||||
branches:
|
||||
- master
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
container:
|
||||
image: ubuntu:16.04
|
||||
|
||||
steps:
|
||||
- name: get container ready for build
|
||||
run: |
|
||||
echo "--------------------------------------------------------------"
|
||||
echo "update distro and install dependencies"
|
||||
|
||||
apt-get update
|
||||
DEBIAN_FRONTEND=noninteractive apt-get install -y -q \
|
||||
software-properties-common
|
||||
|
||||
add-apt-repository -y ppa:savoury1/qt-5-15
|
||||
add-apt-repository -y ppa:savoury1/kde-5-80
|
||||
add-apt-repository -y ppa:savoury1/gpg
|
||||
add-apt-repository -y ppa:savoury1/ffmpeg4
|
||||
add-apt-repository -y ppa:savoury1/vlc3
|
||||
add-apt-repository -y ppa:savoury1/gcc-9
|
||||
add-apt-repository -y ppa:savoury1/display
|
||||
add-apt-repository -y ppa:savoury1/apt-xenial
|
||||
add-apt-repository -y ppa:savoury1/gtk-xenial
|
||||
add-apt-repository -y ppa:savoury1/qt-xenial
|
||||
add-apt-repository -y ppa:savoury1/kde-xenial
|
||||
add-apt-repository -y ppa:savoury1/backports
|
||||
add-apt-repository -y ppa:savoury1/build-tools
|
||||
apt-get update
|
||||
apt-get dist-upgrade -y
|
||||
DEBIAN_FRONTEND=noninteractive apt-get install -y -q \
|
||||
autoconf automake cmake g++ g++-9 git libcrypto++-dev libcurl4-gnutls-dev \
|
||||
libgit2-dev libqt5qml5 libqt5quick5 libqt5svg5-dev \
|
||||
libqt5webkit5-dev libsqlite3-dev libssh2-1-dev libssl-dev libssl-dev \
|
||||
libtool libusb-1.0-0-dev libxml2-dev libxslt1-dev libzip-dev make \
|
||||
pkg-config qml-module-qtlocation qml-module-qtpositioning \
|
||||
qml-module-qtquick2 qt5-qmake qtchooser qtconnectivity5-dev \
|
||||
qtdeclarative5-dev qtdeclarative5-private-dev qtlocation5-dev \
|
||||
qtpositioning5-dev qtscript5-dev qttools5-dev qttools5-dev-tools \
|
||||
qtquickcontrols2-5-dev xvfb libbluetooth-dev libmtp-dev liblzma-dev \
|
||||
curl
|
||||
|
||||
update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-9 60 \
|
||||
--slave /usr/bin/g++ g++ /usr/bin/g++-9
|
||||
|
||||
- name: checkout sources
|
||||
# We cannot update this as glibc on 16.04 is too old for node 20.
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
submodules: recursive
|
||||
|
||||
- name: set the version information
|
||||
id: version_number
|
||||
uses: ./.github/actions/manage-version
|
||||
with:
|
||||
nightly-builds-secret: ${{ secrets.NIGHTLY_BUILDS }}
|
||||
|
||||
- name: build Subsurface
|
||||
run: |
|
||||
echo "--------------------------------------------------------------"
|
||||
echo "building desktop"
|
||||
|
||||
# now build the appimage
|
||||
cd ..
|
||||
bash -e -x subsurface/scripts/build.sh -desktop -create-appdir -build-with-webkit
|
||||
|
||||
- name: test desktop build
|
||||
run: |
|
||||
echo "--------------------------------------------------------------"
|
||||
echo "running tests for desktop"
|
||||
cd build/tests
|
||||
# xvfb-run --auto-servernum ./TestGitStorage -v2
|
||||
xvfb-run --auto-servernum make check
|
||||
|
||||
- name: build appimage
|
||||
env:
|
||||
VERSION: ${{ steps.version_number.outputs.version }}
|
||||
run: |
|
||||
echo "--------------------------------------------------------------"
|
||||
echo "assembling AppImage"
|
||||
|
||||
export QT_PLUGIN_PATH=$QT_ROOT/plugins
|
||||
export QT_QPA_PLATFORM_PLUGIN_PATH=$QT_ROOT/plugins
|
||||
export QT_DEBUG_PLUGINS=1
|
||||
|
||||
cd ..
|
||||
|
||||
# set up the appdir
|
||||
mkdir -p appdir/usr/plugins/
|
||||
|
||||
# mv googlemaps plugins into place
|
||||
mv appdir/usr/usr/lib/x86_64-linux-gnu/qt5/plugins/* appdir/usr/plugins # the usr/usr is not a typo, that's where it ends up
|
||||
rm -rf appdir/usr/home/ appdir/usr/include/ appdir/usr/share/man/ # No need to ship developer and man files as part of the AppImage
|
||||
rm -rf appdir/usr/usr appdir/usr/lib/x86_64-linux-gnu/cmake appdir/usr/lib/pkgconfig
|
||||
cp /usr/lib/x86_64-linux-gnu/libssl.so.1.1 appdir/usr/lib/
|
||||
cp /usr/lib/x86_64-linux-gnu/libcrypto.so.1.1 appdir/usr/lib/
|
||||
|
||||
# get the linuxdeployqt tool and run it to collect the libraries
|
||||
curl -L -O "https://github.com/probonopd/linuxdeployqt/releases/download/7/linuxdeployqt-7-x86_64.AppImage"
|
||||
chmod a+x linuxdeployqt*.AppImage
|
||||
unset QTDIR
|
||||
unset QT_PLUGIN_PATH
|
||||
unset LD_LIBRARY_PATH
|
||||
./linuxdeployqt*.AppImage --appimage-extract-and-run ./appdir/usr/share/applications/*.desktop -exclude-libs=libdbus-1.so.3 -bundle-non-qt-libs -qmldir=./subsurface/stats -qmldir=./subsurface/map-widget/ -verbose=2
|
||||
|
||||
# create the AppImage
|
||||
./linuxdeployqt*.AppImage --appimage-extract-and-run ./appdir/usr/share/applications/*.desktop -exclude-libs=libdbus-1.so.3 -appimage -qmldir=./subsurface/stats -qmldir=./subsurface/map-widget/ -verbose=2
|
||||
|
||||
# copy AppImage to the calling VM
|
||||
# with GitHub Actions the $GITHUB_WORKSPACE directory is the current working directory at the start of a step
|
||||
cp Subsurface*.AppImage* $GITHUB_WORKSPACE/Subsurface-$VERSION.AppImage
|
||||
|
||||
- name: PR artifacts
|
||||
if: github.event_name == 'pull_request'
|
||||
# We cannot update this as glibc on 16.04 is too old for node 20.
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: Subsurface-Linux-AppImage-${{ steps.version_number.outputs.version }}
|
||||
path: Subsurface-*.AppImage
|
||||
compression-level: 0
|
||||
|
||||
# only publish a 'release' on push events (those include merging a PR)
|
||||
- name: upload binaries
|
||||
if: github.event_name == 'push'
|
||||
uses: softprops/action-gh-release@v1
|
||||
with:
|
||||
tag_name: v${{ steps.version_number.outputs.version }}
|
||||
repository: ${{ github.repository_owner }}/nightly-builds
|
||||
token: ${{ secrets.NIGHTLY_BUILDS }}
|
||||
prerelease: false
|
||||
fail_on_unmatched_files: true
|
||||
files: |
|
||||
./Subsurface-*.AppImage
|
||||
19
.github/workflows/linux-ubuntu-20.04-5.15.yml
vendored
Normal file
19
.github/workflows/linux-ubuntu-20.04-5.15.yml
vendored
Normal file
@ -0,0 +1,19 @@
|
||||
name: Ubuntu 20.04 / Qt 5.12--
|
||||
|
||||
on:
|
||||
push:
|
||||
paths-ignore:
|
||||
- scripts/docker/**
|
||||
branches:
|
||||
- master
|
||||
pull_request:
|
||||
paths-ignore:
|
||||
- scripts/docker/**
|
||||
branches:
|
||||
- master
|
||||
|
||||
jobs:
|
||||
do-build-test:
|
||||
uses: ./.github/workflows/linux-debian-generic.yml
|
||||
with:
|
||||
container-image: ubuntu:20.04
|
||||
19
.github/workflows/linux-ubuntu-22.04-5.15.yml
vendored
Normal file
19
.github/workflows/linux-ubuntu-22.04-5.15.yml
vendored
Normal file
@ -0,0 +1,19 @@
|
||||
name: Ubuntu 22.04 / Qt 5.15--
|
||||
|
||||
on:
|
||||
push:
|
||||
paths-ignore:
|
||||
- scripts/docker/**
|
||||
branches:
|
||||
- master
|
||||
pull_request:
|
||||
paths-ignore:
|
||||
- scripts/docker/**
|
||||
branches:
|
||||
- master
|
||||
|
||||
jobs:
|
||||
do-build-test:
|
||||
uses: ./.github/workflows/linux-debian-generic.yml
|
||||
with:
|
||||
container-image: ubuntu:22.04
|
||||
19
.github/workflows/linux-ubuntu-24.04-5.15.yml
vendored
Normal file
19
.github/workflows/linux-ubuntu-24.04-5.15.yml
vendored
Normal file
@ -0,0 +1,19 @@
|
||||
name: Ubuntu 24.04 / Qt 5.15--
|
||||
|
||||
on:
|
||||
push:
|
||||
paths-ignore:
|
||||
- scripts/docker/**
|
||||
branches:
|
||||
- master
|
||||
pull_request:
|
||||
paths-ignore:
|
||||
- scripts/docker/**
|
||||
branches:
|
||||
- master
|
||||
|
||||
jobs:
|
||||
do-build-test:
|
||||
uses: ./.github/workflows/linux-debian-generic.yml
|
||||
with:
|
||||
container-image: ubuntu:24.04
|
||||
53
.github/workflows/mac.yml
vendored
53
.github/workflows/mac.yml
vendored
@ -1,4 +1,5 @@
|
||||
name: Mac
|
||||
|
||||
on:
|
||||
push:
|
||||
paths-ignore:
|
||||
@ -11,38 +12,38 @@ on:
|
||||
branches:
|
||||
- master
|
||||
|
||||
|
||||
jobs:
|
||||
buildMac:
|
||||
build:
|
||||
runs-on: macOS-11
|
||||
steps:
|
||||
- name: checkout sources
|
||||
uses: actions/checkout@v1
|
||||
|
||||
- name: atomically create or retrieve the build number and assemble release notes
|
||||
id: version_number
|
||||
if: github.event_name == 'push'
|
||||
run: |
|
||||
bash scripts/get-atomic-buildnr.sh ${{ github.sha }} ${{ secrets.NIGHTLY_BUILDS }} "CICD-release"
|
||||
version=$(cat release-version)
|
||||
echo "version=$version" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: store dummy version and build number for pull request
|
||||
if: github.event_name == 'pull_request'
|
||||
run: |
|
||||
echo "100" > latest-subsurface-buildnumber
|
||||
echo "CICD-pull-request" > latest-subsurface-buildnumber-extension
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
submodules: recursive
|
||||
|
||||
- name: setup Homebrew
|
||||
run: brew install hidapi libxslt libjpg libmtp create-dmg confuse
|
||||
- name: set our Qt build
|
||||
run: |
|
||||
curl --output ssrf-Qt-5.15.2-mac.tar.xz https://f002.backblazeb2.com/file/Subsurface-Travis/ssrf-Qt5.15.2.tar.xz
|
||||
tar -xJf ssrf-Qt-5.15.2-mac.tar.xz
|
||||
|
||||
- name: checkout Qt resources
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
repository: subsurface/qt-mac
|
||||
ref: main
|
||||
path: qt-mac
|
||||
|
||||
- name: set the version information
|
||||
id: version_number
|
||||
uses: ./.github/actions/manage-version
|
||||
with:
|
||||
nightly-builds-secret: ${{ secrets.NIGHTLY_BUILDS }}
|
||||
|
||||
- name: build Subsurface
|
||||
id: build
|
||||
run: |
|
||||
cd ${GITHUB_WORKSPACE}/..
|
||||
export QT_ROOT=${GITHUB_WORKSPACE}/Qt5.15.2/5.15.2/clang_64
|
||||
export QT_ROOT=${GITHUB_WORKSPACE}/qt-mac/Qt5.15.13
|
||||
export QT_QPA_PLATFORM_PLUGIN_PATH=$QT_ROOT/plugins
|
||||
export PATH=$QT_ROOT/bin:$PATH
|
||||
export CMAKE_PREFIX_PATH=$QT_ROOT/lib/cmake
|
||||
@ -58,10 +59,18 @@ jobs:
|
||||
echo "Created $IMG"
|
||||
echo "dmg=$IMG" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: publish pull request artifacts
|
||||
if: github.event_name == 'pull_request'
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: Subsurface-MacOS-${{ steps.version_number.outputs.version }}
|
||||
path: ${{ steps.build.outputs.dmg }}
|
||||
compression-level: 0
|
||||
|
||||
# only publish a 'release' on push events (those include merging a PR)
|
||||
- name: upload binaries
|
||||
if: github.event_name == 'push'
|
||||
uses: softprops/action-gh-release@v1
|
||||
uses: softprops/action-gh-release@v2
|
||||
with:
|
||||
tag_name: v${{ steps.version_number.outputs.version }}
|
||||
repository: ${{ github.repository_owner }}/nightly-builds
|
||||
|
||||
29
.github/workflows/post-releasenotes.yml
vendored
29
.github/workflows/post-releasenotes.yml
vendored
@ -1,4 +1,5 @@
|
||||
name: Post Release
|
||||
name: Post Release Notes
|
||||
|
||||
on:
|
||||
push:
|
||||
paths-ignore:
|
||||
@ -6,29 +7,35 @@ on:
|
||||
branches:
|
||||
- master
|
||||
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
jobs:
|
||||
postRelease:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: checkout sources
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
submodules: recursive
|
||||
|
||||
- name: set the version information
|
||||
id: version_number
|
||||
uses: ./.github/actions/manage-version
|
||||
with:
|
||||
nightly-builds-secret: ${{ secrets.NIGHTLY_BUILDS }}
|
||||
|
||||
# since we are running this step on a pull request, we will skip build numbers in releases
|
||||
- name: atomically create or retrieve the build number and assemble release notes
|
||||
id: version_number
|
||||
- name: assemble release notes
|
||||
env:
|
||||
EVENT_HEAD_COMMIT_ID: ${{ github.event.head_commit.id }}
|
||||
# Required because we are using the GitHub CLI in 'create-releasenotes.sh'
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
run: |
|
||||
bash -x ./scripts/get-atomic-buildnr.sh ${{ github.sha }} ${{ secrets.NIGHTLY_BUILDS }} "CICD-release"
|
||||
bash scripts/create-releasenotes.sh ${{ github.event.head_commit.id }}
|
||||
version=$(cat release-version)
|
||||
echo "version=$version" >> $GITHUB_OUTPUT
|
||||
scripts/create-releasenotes.sh $EVENT_HEAD_COMMIT_ID
|
||||
|
||||
# add a file containing the release title so it can be picked up and listed on the release page on our web server
|
||||
- name: publish release
|
||||
if: github.event_name == 'push'
|
||||
uses: softprops/action-gh-release@v1
|
||||
uses: softprops/action-gh-release@v2
|
||||
with:
|
||||
tag_name: v${{ steps.version_number.outputs.version }}
|
||||
repository: ${{ github.repository_owner }}/nightly-builds
|
||||
|
||||
2
.github/workflows/scripts/check_usns.py
vendored
2
.github/workflows/scripts/check_usns.py
vendored
@ -23,13 +23,11 @@ logger.setLevel(logging.INFO)
|
||||
|
||||
APPLICATION = "subsurface-ci"
|
||||
LAUNCHPAD = "production"
|
||||
RELEASE = "bionic"
|
||||
TEAM = "subsurface"
|
||||
SOURCE_NAME = "subsurface"
|
||||
SNAPS = {
|
||||
"subsurface": {
|
||||
"stable": {"recipe": "subsurface-stable"},
|
||||
"candidate": {"recipe": "subsurface-candidate"},
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
@ -1,58 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -x
|
||||
set -e
|
||||
|
||||
# this gets executed by the GitHub Action when building an AppImage for Linux
|
||||
# inside of the trusty-qt512 container
|
||||
|
||||
export PATH=$QT_ROOT/bin:$PATH # Make sure correct qmake is found on the $PATH for linuxdeployqt
|
||||
export CMAKE_PREFIX_PATH=$QT_ROOT/lib/cmake
|
||||
|
||||
# echo "--------------------------------------------------------------"
|
||||
# echo "install missing packages"
|
||||
# apt install -y libbluetooth-dev libmtp-dev
|
||||
|
||||
# the container currently has things under / that need to be under /__w/subsurface/subsurface instead
|
||||
cp -a /appdir /__w/subsurface/
|
||||
cp -a /install-root /__w/subsurface/
|
||||
|
||||
echo "--------------------------------------------------------------"
|
||||
echo "building desktop"
|
||||
|
||||
# now build our AppImage
|
||||
bash -e -x subsurface/scripts/build.sh -desktop -create-appdir -build-with-webkit -quick
|
||||
|
||||
echo "--------------------------------------------------------------"
|
||||
echo "assembling AppImage"
|
||||
|
||||
export QT_PLUGIN_PATH=$QT_ROOT/plugins
|
||||
export QT_QPA_PLATFORM_PLUGIN_PATH=$QT_ROOT/plugins
|
||||
export QT_DEBUG_PLUGINS=1
|
||||
|
||||
# set up the appdir
|
||||
mkdir -p appdir/usr/plugins/
|
||||
|
||||
# mv googlemaps plugins into place
|
||||
mv appdir/usr/usr/local/Qt/5.*/gcc_64/plugins/* appdir/usr/plugins # the usr/usr is not a typo, that's where it ends up
|
||||
rm -rf appdir/usr/home/ appdir/usr/include/ appdir/usr/share/man/ # No need to ship developer and man files as part of the AppImage
|
||||
rm -rf appdir/usr/usr appdir/usr/lib/cmake appdir/usr/lib/pkgconfig
|
||||
cp /ssllibs/libssl.so appdir/usr/lib/libssl.so.1.1
|
||||
cp /ssllibs/libcrypto.so appdir/usr/lib/libcrypto.so.1.1
|
||||
|
||||
# get the linuxdeployqt tool and run it to collect the libraries
|
||||
curl -L -O "https://github.com/probonopd/linuxdeployqt/releases/download/7/linuxdeployqt-7-x86_64.AppImage"
|
||||
chmod a+x linuxdeployqt*.AppImage
|
||||
unset QTDIR
|
||||
unset QT_PLUGIN_PATH
|
||||
unset LD_LIBRARY_PATH
|
||||
./linuxdeployqt*.AppImage --appimage-extract-and-run ./appdir/usr/share/applications/*.desktop -exclude-libs=libdbus-1.so.3 -bundle-non-qt-libs -qmldir=./subsurface/stats -qmldir=./subsurface/map-widget/ -verbose=2
|
||||
|
||||
# create the AppImage
|
||||
export VERSION=$(cd subsurface/scripts ; ./get-version) # linuxdeployqt uses this for naming the file
|
||||
./linuxdeployqt*.AppImage --appimage-extract-and-run ./appdir/usr/share/applications/*.desktop -exclude-libs=libdbus-1.so.3 -appimage -qmldir=./subsurface/stats -qmldir=./subsurface/map-widget/ -verbose=2
|
||||
|
||||
# copy AppImage to the calling VM
|
||||
# with GitHub Actions the /${GITHUB_WORKSPACE} directory is the current working directory at the start of a step
|
||||
cp Subsurface*.AppImage* /${GITHUB_WORKSPACE}/Subsurface.AppImage
|
||||
ls -l /${GITHUB_WORKSPACE}/Subsurface.AppImage
|
||||
15
.github/workflows/ubuntu-launchpad-build.yml
vendored
15
.github/workflows/ubuntu-launchpad-build.yml
vendored
@ -15,13 +15,16 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Check out sources
|
||||
uses: actions/checkout@v1
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
submodules: recursive
|
||||
|
||||
- name: atomically create or retrieve the build number
|
||||
- name: set the version information
|
||||
id: version_number
|
||||
if: github.event_name == 'push'
|
||||
run: |
|
||||
bash scripts/get-atomic-buildnr.sh ${{ github.sha }} ${{ secrets.NIGHTLY_BUILDS }} "CICD-release"
|
||||
uses: ./.github/actions/manage-version
|
||||
with:
|
||||
nightly-builds-secret: ${{ secrets.NIGHTLY_BUILDS }}
|
||||
|
||||
- name: Setup build dependencies
|
||||
run: |
|
||||
@ -48,5 +51,5 @@ jobs:
|
||||
- name: run the launchpad make-package script
|
||||
run: |
|
||||
cd ..
|
||||
bash -x subsurface/packaging/ubuntu/make-package.sh ${{ github.ref_name }}
|
||||
bash -x subsurface/packaging/ubuntu/make-package.sh $GITHUB_REF_NAME
|
||||
|
||||
|
||||
@ -16,17 +16,17 @@ jobs:
|
||||
mxe_sha: 'c0bfefc57a00fdf6cb5278263e21a478e47b0bf5'
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v1
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Build the name for the docker image
|
||||
id: build_name
|
||||
run: |
|
||||
v=${{ env.VERSION }}
|
||||
b=${{ github.ref }} # -BRANCH suffix, unless the branch is master
|
||||
v=$VERSION
|
||||
b=$GITHUB_REF # -BRANCH suffix, unless the branch is master
|
||||
b=${b/refs\/heads\//}
|
||||
b=${b,,} # the name needs to be all lower case
|
||||
if [ $b = "master" ] ; then b="" ; else b="-$b" ; fi
|
||||
echo "NAME=${{ github.repository_owner }}/mxe-build${b}:${v}" >> $GITHUB_OUTPUT
|
||||
echo "NAME=$GITHUB_REPOSITORY_OWNER/mxe-build${b}:${v}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Build and Publish Linux Docker image to Dockerhub
|
||||
uses: elgohr/Publish-Docker-Github-Action@v5
|
||||
|
||||
40
.github/workflows/windows.yml
vendored
@@ -1,4 +1,5 @@
name: Windows

on:
push:
paths-ignore:
@@ -12,28 +13,23 @@ on:
- master

jobs:
buildWindows:
build:
runs-on: ubuntu-latest
container:
image: docker://subsurface/mxe-build:3.1.0

steps:
- name: checkout sources
uses: actions/checkout@v1
uses: actions/checkout@v4
with:
fetch-depth: 0
submodules: recursive

- name: atomically create or retrieve the build number and assemble release notes
- name: set the version information
id: version_number
if: github.event_name == 'push'
run: |
bash scripts/get-atomic-buildnr.sh ${{ github.sha }} ${{ secrets.NIGHTLY_BUILDS }} "CICD-release"
version=$(cat release-version)
echo "version=$version" >> $GITHUB_OUTPUT

- name: store dummy version and build number for pull request
if: github.event_name == 'pull_request'
run: |
echo "100" > latest-subsurface-buildnumber
echo "CICD-pull-request" > latest-subsurface-buildnumber-extension
uses: ./.github/actions/manage-version
with:
nightly-builds-secret: ${{ secrets.NIGHTLY_BUILDS }}

- name: get other dependencies
env:
@@ -44,18 +40,28 @@ jobs:
git config --global --add safe.directory ${SUBSURFACE_REPO_PATH}/libdivecomputer
cd /win
ln -s /__w/subsurface/subsurface .
bash -x subsurface/.github/workflows/scripts/windows-container-prep.sh 2>&1 | tee pre-build.log
bash -x subsurface/packaging/windows/container-prep.sh 2>&1 | tee pre-build.log

- name: run build
run: |
export OUTPUT_DIR="$GITHUB_WORKSPACE"
cd /win
bash -x subsurface/.github/workflows/scripts/windows-in-container-build.sh 2>&1 | tee build.log
bash -x subsurface/packaging/windows/in-container-build.sh 2>&1 | tee build.log
grep "Built target installer" build.log

- name: publish pull request artifacts
if: github.event_name == 'pull_request'
uses: actions/upload-artifact@v4
with:
name: Subsurface-Windows-${{ steps.version_number.outputs.version }}
path: |
subsurface*.exe*
smtk2ssrf*.exe

# only publish a 'release' on push events (those include merging a PR)
- name: upload binaries
if: github.event_name == 'push'
uses: softprops/action-gh-release@v1
uses: softprops/action-gh-release@v2
with:
tag_name: v${{ steps.version_number.outputs.version }}
repository: ${{ github.repository_owner }}/nightly-builds
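One detail worth noting in the build step above: the trailing grep doubles as a success check, because a non-matching grep exits non-zero and that fails the workflow step. A minimal stand-alone illustration (not part of the diff):

```
# succeeds only if the installer target was actually built
grep "Built target installer" build.log || { echo "installer was not built" ; exit 1 ; }
```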
1
.gitignore
vendored
@@ -49,3 +49,4 @@ appdata/subsurface.appdata.xml
android-mobile/Roboto-Regular.ttf
gh_release_notes.md
release_content_title.txt
/output/
@@ -1,3 +1,4 @@
statistics: show proper dates in January
desktop: add country to the fields indexed for full text search
import: update libdivecomputer version, add support for the Scubapro G3 / Luna and Shearwater Tern
desktop: add a button linking to the 'Contribute' page
@@ -124,8 +124,8 @@ if (SUBSURFACE_ASAN_BUILD)
endif()

# every compiler understands -Wall
set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wall")
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall")
set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wall -Werror=format")
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -Werror=format")

# by default optimize with -O2 even for debug builds
set (GCC_OPTIMIZATION_FLAGS "-O2" CACHE STRING "GCC optimization flags")
@@ -320,7 +320,7 @@ elseif(CMAKE_SYSTEM_NAME STREQUAL "Linux")
endif()
elseif(CMAKE_SYSTEM_NAME STREQUAL "Darwin")
execute_process(
COMMAND bash scripts/get-version
COMMAND bash scripts/get-version.sh
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
OUTPUT_VARIABLE SSRF_VERSION_STRING
OUTPUT_STRIP_TRAILING_WHITESPACE
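The `-Werror=format` flags added above promote printf-style format/argument mismatches from warnings to hard errors. A minimal illustration of the kind of code that now fails to compile (hypothetical file name, not part of the repository):

```
cat > format-demo.c <<'EOF'
#include <stdio.h>
int main(void)
{
	printf("%d\n", "not an int");   /* format expects int, gets char * */
	return 0;
}
EOF
gcc -Wall -Werror=format -c format-demo.c   # now an error instead of a warning
```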
@@ -135,7 +135,7 @@ msgid ""
"mailto:subsurface@subsurface-divelog.org[our mailing list] and report bugs "
"at https://github.com/Subsurface/subsurface/issues[our bugtracker]. "
"For instructions on how to build the software and (if needed) its "
"dependencies please consult the INSTALL file included with the source code."
"dependencies please consult the INSTALL.md file included with the source code."
msgstr ""

#. type: Plain text

@@ -175,7 +175,7 @@ msgid ""
"an email to mailto:subsurface@subsurface-divelog.org[our mailing list] and "
"report bugs at https://github.com/Subsurface-divelog/subsurface/issues[our "
"bugtracker]. For instructions on how to build the software and (if needed) "
"its dependencies please consult the INSTALL file included with the source "
"its dependencies please consult the INSTALL.md file included with the source "
"code."
msgstr ""
"Ce manuel explique comment utiliser le programme _Subsurface_. Pour "
@@ -184,7 +184,7 @@ msgstr ""
"pouvez envoyer un e-mail sur mailto:subsurface@subsurface-divelog.org[notre "
"liste de diffusion] et rapportez les bogues sur http://trac.hohndel."
"org[notre bugtracker]. Pour des instructions de compilation du logiciel et "
"(si besoin) de ses dépendances, merci de consulter le fichier INSTALL inclus "
"(si besoin) de ses dépendances, merci de consulter le fichier INSTALL.md inclus "
"dans les sources logicielles."

#. type: Plain text

@@ -460,7 +460,7 @@ the software, consult the <em>Downloads</em> page on the
Please discuss issues with this program by sending an email to
<a href="mailto:subsurface@subsurface-divelog.org">our mailing list</a> and report bugs at
<a href="https://github.com/Subsurface/subsurface/issues">our bugtracker</a>. For instructions on how to build the
software and (if needed) its dependencies please consult the INSTALL file
software and (if needed) its dependencies please consult the INSTALL.md file
included with the source code.</p></div>
<div class="paragraph"><p><strong>Audience</strong>: Recreational Scuba Divers, Free Divers, Tec Divers, Professional
Divers</p></div>

@@ -34,7 +34,7 @@ https://subsurface-divelog.org/[_Subsurface_ web site].
Please discuss issues with this program by sending an email to
mailto:subsurface@subsurface-divelog.org[our mailing list] and report bugs at
https://github.com/Subsurface/subsurface/issues[our bugtracker]. For instructions on how to build the
software and (if needed) its dependencies please consult the INSTALL file
software and (if needed) its dependencies please consult the INSTALL.md file
included with the source code.

*Audience*: Recreational Scuba Divers, Free Divers, Tec Divers, Professional

@@ -517,7 +517,7 @@ web</a>. Por favor, comenta los problemas que tengas con este programa enviando
mail a <a href="mailto:subsurface@subsurface-divelog.org">nuestra lista de correo</a> e informa de
fallos en <a href="https://github.com/Subsurface/subsurface/issues">nuestro bugtracker</a>.
Para instrucciones acerca de como compilar el software y (en caso necesario)
sus dependencias, por favor, consulta el archivo INSTALL incluido con el código
sus dependencias, por favor, consulta el archivo INSTALL.md incluido con el código
fuente.</p></div>
<div class="paragraph"><p><strong>Audiencia</strong>: Buceadores recreativos, Buceadores en apnea, Buceadores técnicos,
Buceadores profesionales.</p></div>

@@ -61,7 +61,7 @@ web]. Por favor, comenta los problemas que tengas con este programa enviando un
mail a mailto:subsurface@subsurface-divelog.org[nuestra lista de correo] e informa de
fallos en https://github.com/Subsurface/subsurface/issues[nuestro bugtracker].
Para instrucciones acerca de como compilar el software y (en caso necesario)
sus dependencias, por favor, consulta el archivo INSTALL incluido con el código
sus dependencias, por favor, consulta el archivo INSTALL.md incluido con el código
fuente.

*Audiencia*: Buceadores recreativos, Buceadores en apnea, Buceadores técnicos,

@@ -526,7 +526,7 @@ problème, vous pouvez envoyer un e-mail sur
<a href="mailto:subsurface@subsurface-divelog.org">notre liste de diffusion</a> et
rapportez les bogues sur <a href="http://trac.hohndel.org">notre bugtracker</a>. Pour
des instructions de compilation du logiciel et (si besoin) de ses
dépendances, merci de consulter le fichier INSTALL inclus dans les sources
dépendances, merci de consulter le fichier INSTALL.md inclus dans les sources
logicielles.</p></div>
<div class="paragraph"><p><strong>Public</strong> : Plongeurs loisirs, apnéistes, plongeurs Tek et plongeurs
professionnels</p></div>

@@ -61,7 +61,7 @@ problème, vous pouvez envoyer un e-mail sur
mailto:subsurface@subsurface-divelog.org[notre liste de diffusion] et
rapportez les bogues sur http://trac.hohndel.org[notre bugtracker]. Pour
des instructions de compilation du logiciel et (si besoin) de ses
dépendances, merci de consulter le fichier INSTALL inclus dans les sources
dépendances, merci de consulter le fichier INSTALL.md inclus dans les sources
logicielles.

*Public* : Plongeurs loisirs, apnéistes, plongeurs Tek et plongeurs

@@ -516,7 +516,7 @@ het programma kunnen bij de ontwikkelaars gemeld worden via email op
<a href="mailto:subsurface@subsurface-divelog.org">onze mailinglijst</a>. Fouten kunnen
ook gemeld worden op <a href="https://github.com/Subsurface/subsurface/issues">onze bugtracker</a>.
Instructies hoe <em>Subsurface</em> zelf te compileren vanuit de broncode staan ook op
onze website en in het INSTALL bestand in de broncode.</p></div>
onze website en in het INSTALL.md bestand in de broncode.</p></div>
<div class="paragraph"><p><strong>Doelgroep</strong>: Recreatieve duikers, Tec duikers, Apneu duikers,
Professionele duikers.</p></div>
<div id="toc">

@@ -59,7 +59,7 @@ het programma kunnen bij de ontwikkelaars gemeld worden via email op
mailto:subsurface@subsurface-divelog.org[onze mailinglijst]. Fouten kunnen
ook gemeld worden op https://github.com/Subsurface/subsurface/issues[onze bugtracker].
Instructies hoe _Subsurface_ zelf te compileren vanuit de broncode staan ook op
onze website en in het INSTALL bestand in de broncode.
onze website en in het INSTALL.md bestand in de broncode.

*Doelgroep*: Recreatieve duikers, Tec duikers, Apneu duikers,
Professionele duikers.
@@ -1,5 +1,4 @@
Building Subsurface from Source
===============================
# Building Subsurface from Source

Subsurface uses quite a few open source libraries and frameworks to do its
job. The most important ones include libdivecomputer, Qt, libxml2, libxslt,
@@ -13,23 +12,27 @@ Below are instructions for building Subsurface
- iOS (cross-building)

Getting Subsurface source
-------------------------
## Getting Subsurface source

You can get the sources to the latest development version from our git
repository:
git clone http://github.com/Subsurface/subsurface.git
cd subsurface
git submodule init # this will give you our flavor of libdivecomputer

```
git clone http://github.com/Subsurface/subsurface.git
cd subsurface
git submodule init # this will give you our flavor of libdivecomputer
```

You keep it updated by doing:
git checkout master
git pull -r
git submodule update

```
git checkout master
git pull -r
git submodule update
```

Our flavor of libdivecomputer
-----------------------------
### Our flavor of libdivecomputer

Subsurface requires its own flavor of libdivecomputer which is included
above as git submodule
@@ -37,7 +40,7 @@ above as git submodule
The branches won't have a pretty history and will include ugly merges,
but they should always allow a fast forward pull that tracks what we
believe developers should build against. All our patches are contained
in the "Subsurface-DS9" branch.
in the `Subsurface-DS9` branch.

This should allow distros to see which patches we have applied on top of
upstream. They will receive force pushes as we rebase to newer versions of
@@ -53,8 +56,7 @@ Subsurface or trying to understand what we have done relative to their
respective upstreams.

Getting Qt5
-----------
### Getting Qt5

We use Qt5 in order to only maintain one UI across platforms.

@@ -74,36 +76,41 @@ significantly reduced flexibility.
As of this writing, there is thankfully a thirdparty offline installer still
available:

pip3 install aqtinstall
aqt install -O <Qt Location> 5.15.2 mac desktop
```
pip3 install aqtinstall
aqt install -O <Qt Location> 5.15.2 mac desktop
```

(or whatever version / OS you need). This installer is surprisingly fast
and seems well maintained - note that we don't use this for Windows as
that is completely built from source using MXE.

In order to use this Qt installation, simply add it to your PATH:

```
PATH=<Qt Location>/<version>/<type>/bin:$PATH
```

QtWebKit is needed, if you want to print, but no longer part of Qt5,
so you need to download it and compile. In case you just want to test
without print possibility omit this step.

git clone -b 5.212 https://github.com/qt/qtwebkit
mkdir -p qtwebkit/WebKitBuild/Release
cd qtwebkit/WebKitBuild/Release
cmake -DPORT=Qt -DCMAKE_BUILD_TYPE=Release -DQt5_DIR=/<Qt Location>/<version>/<type>/lib/cmake/Qt5 ../..
make install
```
git clone -b 5.212 https://github.com/qt/qtwebkit
mkdir -p qtwebkit/WebKitBuild/Release
cd qtwebkit/WebKitBuild/Release
cmake -DPORT=Qt -DCMAKE_BUILD_TYPE=Release -DQt5_DIR=/<Qt Location>/<version>/<type>/lib/cmake/Qt5 ../..
make install
```

Other third party library dependencies
--------------------------------------
### Other third party library dependencies

In order for our cloud storage to be fully functional you need
libgit2 0.26 or newer.

cmake build system
------------------
### cmake build system

Our main build system is based on cmake. But qmake is needed
for the googlemaps plugin and the iOS build.
@@ -114,109 +121,127 @@ distribution (see build instructions).

Build options for Subsurface
----------------------------
## Build options for Subsurface

The following options are recognised when passed to cmake:

-DCMAKE_BUILD_TYPE=Release create a release build
-DCMAKE_BUILD_TYPE=Debug create a debug build
`-DCMAKE_BUILD_TYPE=Release` create a release build
`-DCMAKE_BUILD_TYPE=Debug` create a debug build

The Makefile that was created using cmake can be forced into a much more
verbose mode by calling

make VERBOSE=1
```
make VERBOSE=1
```

Many more variables are supported, the easiest way to interact with them is
to call

ccmake .
```
ccmake .
```

in your build directory.

Building the development version of Subsurface under Linux
----------------------------------------------------------
### Building the development version of Subsurface under Linux

On Fedora you need

```
sudo dnf install autoconf automake bluez-libs-devel cmake gcc-c++ git \
libcurl-devel libsqlite3x-devel libssh2-devel libtool libudev-devel \
libusbx-devel libxml2-devel libxslt-devel make \
qt5-qtbase-devel qt5-qtconnectivity-devel qt5-qtdeclarative-devel \
qt5-qtlocation-devel qt5-qtscript-devel qt5-qtsvg-devel \
qt5-qttools-devel qt5-qtwebkit-devel redhat-rpm-config \
bluez-libs-devel libgit2-devel libzip-devel libmtp-devel
libcurl-devel libsqlite3x-devel libssh2-devel libtool libudev-devel \
libusbx-devel libxml2-devel libxslt-devel make \
qt5-qtbase-devel qt5-qtconnectivity-devel qt5-qtdeclarative-devel \
qt5-qtlocation-devel qt5-qtscript-devel qt5-qtsvg-devel \
qt5-qttools-devel qt5-qtwebkit-devel redhat-rpm-config \
bluez-libs-devel libgit2-devel libzip-devel libmtp-devel
```

Package names are sadly different on OpenSUSE

```
sudo zypper install git gcc-c++ make autoconf automake libtool cmake libzip-devel \
libxml2-devel libxslt-devel sqlite3-devel libusb-1_0-devel \
libqt5-linguist-devel libqt5-qttools-devel libQt5WebKitWidgets-devel \
libqt5-qtbase-devel libQt5WebKit5-devel libqt5-qtsvg-devel \
libqt5-qtscript-devel libqt5-qtdeclarative-devel \
libqt5-qtconnectivity-devel libqt5-qtlocation-devel libcurl-devel \
bluez-devel libgit2-devel libmtp-devel
libxml2-devel libxslt-devel sqlite3-devel libusb-1_0-devel \
libqt5-linguist-devel libqt5-qttools-devel libQt5WebKitWidgets-devel \
libqt5-qtbase-devel libQt5WebKit5-devel libqt5-qtsvg-devel \
libqt5-qtscript-devel libqt5-qtdeclarative-devel \
libqt5-qtconnectivity-devel libqt5-qtlocation-devel libcurl-devel \
bluez-devel libgit2-devel libmtp-devel
```

On Debian Bookworm this seems to work

```
sudo apt install \
autoconf automake cmake g++ git libbluetooth-dev libcrypto++-dev \
libcurl4-openssl-dev libgit2-dev libqt5qml5 libqt5quick5 libqt5svg5-dev \
libqt5webkit5-dev libsqlite3-dev libssh2-1-dev libssl-dev libtool \
libusb-1.0-0-dev libxml2-dev libxslt1-dev libzip-dev make pkg-config \
qml-module-qtlocation qml-module-qtpositioning qml-module-qtquick2 \
qt5-qmake qtchooser qtconnectivity5-dev qtdeclarative5-dev \
qtdeclarative5-private-dev qtlocation5-dev qtpositioning5-dev \
qtscript5-dev qttools5-dev qttools5-dev-tools libmtp-dev
autoconf automake cmake g++ git libbluetooth-dev libcrypto++-dev \
libcurl4-openssl-dev libgit2-dev libqt5qml5 libqt5quick5 libqt5svg5-dev \
libqt5webkit5-dev libsqlite3-dev libssh2-1-dev libssl-dev libtool \
libusb-1.0-0-dev libxml2-dev libxslt1-dev libzip-dev make pkg-config \
qml-module-qtlocation qml-module-qtpositioning qml-module-qtquick2 \
qt5-qmake qtchooser qtconnectivity5-dev qtdeclarative5-dev \
qtdeclarative5-private-dev qtlocation5-dev qtpositioning5-dev \
qtscript5-dev qttools5-dev qttools5-dev-tools libmtp-dev
```

In order to build and run mobile-on-desktop, you also need

```
sudo apt install \
qtquickcontrols2-5-dev qml-module-qtquick-window2 qml-module-qtquick-dialogs \
qml-module-qtquick-layouts qml-module-qtquick-controls2 qml-module-qtquick-templates2 \
qml-module-qtgraphicaleffects qml-module-qtqml-models2 qml-module-qtquick-controls
qtquickcontrols2-5-dev qml-module-qtquick-window2 qml-module-qtquick-dialogs \
qml-module-qtquick-layouts qml-module-qtquick-controls2 qml-module-qtquick-templates2 \
qml-module-qtgraphicaleffects qml-module-qtqml-models2 qml-module-qtquick-controls
```

Package names for Ubuntu 21.04

```
sudo apt install \
autoconf automake cmake g++ git libbluetooth-dev libcrypto++-dev \
libcurl4-gnutls-dev libgit2-dev libqt5qml5 libqt5quick5 libqt5svg5-dev \
libqt5webkit5-dev libsqlite3-dev libssh2-1-dev libssl-dev libtool \
libusb-1.0-0-dev libxml2-dev libxslt1-dev libzip-dev make pkg-config \
qml-module-qtlocation qml-module-qtpositioning qml-module-qtquick2 \
qt5-qmake qtchooser qtconnectivity5-dev qtdeclarative5-dev \
qtdeclarative5-private-dev qtlocation5-dev qtpositioning5-dev \
qtscript5-dev qttools5-dev qttools5-dev-tools libmtp-dev
autoconf automake cmake g++ git libbluetooth-dev libcrypto++-dev \
libcurl4-gnutls-dev libgit2-dev libqt5qml5 libqt5quick5 libqt5svg5-dev \
libqt5webkit5-dev libsqlite3-dev libssh2-1-dev libssl-dev libtool \
libusb-1.0-0-dev libxml2-dev libxslt1-dev libzip-dev make pkg-config \
qml-module-qtlocation qml-module-qtpositioning qml-module-qtquick2 \
qt5-qmake qtchooser qtconnectivity5-dev qtdeclarative5-dev \
qtdeclarative5-private-dev qtlocation5-dev qtpositioning5-dev \
qtscript5-dev qttools5-dev qttools5-dev-tools libmtp-dev
```

In order to build and run mobile-on-desktop, you also need

```
sudo apt install \
qtquickcontrols2-5-dev qml-module-qtquick-window2 qml-module-qtquick-dialogs \
qml-module-qtquick-layouts qml-module-qtquick-controls2 qml-module-qtquick-templates2 \
qml-module-qtgraphicaleffects qml-module-qtqml-models2 qml-module-qtquick-controls
qtquickcontrols2-5-dev qml-module-qtquick-window2 qml-module-qtquick-dialogs \
qml-module-qtquick-layouts qml-module-qtquick-controls2 qml-module-qtquick-templates2 \
qml-module-qtgraphicaleffects qml-module-qtqml-models2 qml-module-qtquick-controls
```

On Raspberry Pi (Raspian Buster and Ubuntu Mate 20.04.1) this seems to work

```
sudo apt install \
autoconf automake cmake g++ git libbluetooth-dev libcrypto++-dev \
libcurl4-gnutls-dev libgit2-dev libqt5qml5 libqt5quick5 libqt5svg5-dev \
libqt5webkit5-dev libsqlite3-dev libssh2-1-dev libssl-dev libtool \
libusb-1.0-0-dev libxml2-dev libxslt1-dev libzip-dev make pkg-config \
qml-module-qtlocation qml-module-qtpositioning qml-module-qtquick2 \
qt5-qmake qtchooser qtconnectivity5-dev qtdeclarative5-dev \
qtdeclarative5-private-dev qtlocation5-dev qtpositioning5-dev \
qtscript5-dev qttools5-dev qttools5-dev-tools libmtp-dev
autoconf automake cmake g++ git libbluetooth-dev libcrypto++-dev \
libcurl4-gnutls-dev libgit2-dev libqt5qml5 libqt5quick5 libqt5svg5-dev \
libqt5webkit5-dev libsqlite3-dev libssh2-1-dev libssl-dev libtool \
libusb-1.0-0-dev libxml2-dev libxslt1-dev libzip-dev make pkg-config \
qml-module-qtlocation qml-module-qtpositioning qml-module-qtquick2 \
qt5-qmake qtchooser qtconnectivity5-dev qtdeclarative5-dev \
qtdeclarative5-private-dev qtlocation5-dev qtpositioning5-dev \
qtscript5-dev qttools5-dev qttools5-dev-tools libmtp-dev
```

In order to build and run mobile-on-desktop, you also need

```
sudo apt install \
qtquickcontrols2-5-dev qml-module-qtquick-window2 qml-module-qtquick-dialogs \
qml-module-qtquick-layouts qml-module-qtquick-controls2 qml-module-qtquick-templates2 \
qml-module-qtgraphicaleffects qml-module-qtqml-models2 qml-module-qtquick-controls
qtquickcontrols2-5-dev qml-module-qtquick-window2 qml-module-qtquick-dialogs \
qml-module-qtquick-layouts qml-module-qtquick-controls2 qml-module-qtquick-templates2 \
qml-module-qtgraphicaleffects qml-module-qtqml-models2 qml-module-qtquick-controls
```

Note that on Ubuntu Mate on the Raspberry Pi, you may need to configure
@@ -226,42 +251,46 @@ swap space configured by default. See the dphys-swapfile package.
On Raspberry Pi OS with Desktop (64-bit) Released April 4th, 2022, this seems
to work

```
sudo apt install \
autoconf automake cmake g++ git libbluetooth-dev libcrypto++-dev \
libcurl4-gnutls-dev libgit2-dev libqt5qml5 libqt5quick5 libqt5svg5-dev \
libqt5webkit5-dev libsqlite3-dev libssh2-1-dev libssl-dev libtool \
libusb-1.0-0-dev libxml2-dev libxslt1-dev libzip-dev make pkg-config \
qml-module-qtlocation qml-module-qtpositioning qml-module-qtquick2 \
qt5-qmake qtchooser qtconnectivity5-dev qtdeclarative5-dev \
qtdeclarative5-private-dev qtlocation5-dev qtpositioning5-dev \
qtscript5-dev qttools5-dev qttools5-dev-tools libmtp-dev
autoconf automake cmake g++ git libbluetooth-dev libcrypto++-dev \
libcurl4-gnutls-dev libgit2-dev libqt5qml5 libqt5quick5 libqt5svg5-dev \
libqt5webkit5-dev libsqlite3-dev libssh2-1-dev libssl-dev libtool \
libusb-1.0-0-dev libxml2-dev libxslt1-dev libzip-dev make pkg-config \
qml-module-qtlocation qml-module-qtpositioning qml-module-qtquick2 \
qt5-qmake qtchooser qtconnectivity5-dev qtdeclarative5-dev \
qtdeclarative5-private-dev qtlocation5-dev qtpositioning5-dev \
qtscript5-dev qttools5-dev qttools5-dev-tools libmtp-dev
```

Note that you'll need to increase the swap space as the default of 100MB
doesn't seem to be enough. 1024MB worked on a 3B+.
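A minimal sketch of one way to raise the swap size with the dphys-swapfile package mentioned above (assumes a default Raspberry Pi OS setup where that package provides the swap service; 1024 mirrors the 3B+ experience above, adjust to taste):

```
sudo sed -i 's/^CONF_SWAPSIZE=.*/CONF_SWAPSIZE=1024/' /etc/dphys-swapfile
sudo systemctl restart dphys-swapfile
free -h   # verify the new swap size
```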
If maps aren't working, copy the googlemaps plugin
from <build_dir>/subsurface/googlemaps/build/libqtgeoservices_googlemaps.so
to /usr/lib/aarch64-linux-gnu/qt5/plugins/geoservices.
from `<build_dir>/subsurface/googlemaps/build/libqtgeoservices_googlemaps.so`
to `/usr/lib/aarch64-linux-gnu/qt5/plugins/geoservices/`.
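As a concrete form of the copy described above (paths as given in the text; `<build_dir>` is a placeholder for your build directory, and sudo is assumed because the destination is system-wide):

```
sudo cp <build_dir>/subsurface/googlemaps/build/libqtgeoservices_googlemaps.so \
        /usr/lib/aarch64-linux-gnu/qt5/plugins/geoservices/
```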
If Subsurface can't seem to see your dive computer on /dev/ttyUSB0, even after
If Subsurface can't seem to see your dive computer on `/dev/ttyUSB0`, even after
adjusting your account's group settings (see note below about usermod), it
might be that the FTDI driver doesn't recognize the VendorID/ProductID of your
computer. Follow the instructions here:
computer. Follow the instructions here:

https://www.ftdichip.com/Support/Documents/TechnicalNotes/TN_101_Customising_FTDI_VID_PID_In_Linux(FT_000081).pdf
https://www.ftdichip.com/Support/Documents/TechnicalNotes/TN_101_Customising_FTDI_VID_PID_In_Linux(FT_000081).pdf

If you're unsure of the VID/PID of your device, plug your dive computer in to
your host and run `dmesg`. That should show the codes that are needed to
your host and run `dmesg`. That should show the codes that are needed to
follow TN_101.
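A minimal sketch of that procedure, assuming the stock `ftdi_sio` driver and a VID/PID of 0403:f460 (both values are placeholders, use the ones your `dmesg` output reports):

```
dmesg | grep -i "idVendor\|idProduct"    # find the VID/PID of the attached device
sudo modprobe ftdi_sio                   # make sure the driver is loaded
echo 0403 f460 | sudo tee /sys/bus/usb-serial/drivers/ftdi_sio/new_id
ls -l /dev/ttyUSB*                       # the device node should now appear
```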
On PCLinuxOS you appear to need the following packages

su -c "apt-get install -y autoconf automake cmake gcc-c++ git libtool \
lib64bluez-devel lib64qt5bluetooth-devel lib64qt5concurrent-devel \
lib64qt5help-devel lib64qt5location-devel lib64qt5quicktest-devel \
lib64qt5quickwidgets-devel lib64qt5script-devel lib64qt5svg-devel \
lib64qt5test-devel lib64qt5webkitwidgets-devel lib64qt5xml-devel \
lib64ssh2-devel lib64usb1.0-devel lib64zip-devel qttools5 qttranslations5"
```
su -c "apt-get install -y autoconf automake cmake gcc-c++ git libtool \
lib64bluez-devel lib64qt5bluetooth-devel lib64qt5concurrent-devel \
lib64qt5help-devel lib64qt5location-devel lib64qt5quicktest-devel \
lib64qt5quickwidgets-devel lib64qt5script-devel lib64qt5svg-devel \
lib64qt5test-devel lib64qt5webkitwidgets-devel lib64qt5xml-devel \
lib64ssh2-devel lib64usb1.0-devel lib64zip-devel qttools5 qttranslations5"
```

In order to build Subsurface, use the supplied build script. This should
work on most systems that have all the prerequisite packages installed.
@@ -269,109 +298,121 @@ work on most systems that have all the prerequisite packages installed.
You should have Subsurface sources checked out in a sane place, something
like this:

```
mkdir -p ~/src
cd ~/src
git clone https://github.com/Subsurface/subsurface.git
./subsurface/scripts/build.sh # <- this step will take quite a while as it
# compiles a handful of libraries before
# building Subsurface
# building Subsurface
```

Now you can run Subsurface like this:

```
cd ~/src/subsurface/build
./subsurface
```

Note: on many Linux versions (for example on Kubuntu 15.04) the user must
belong to the dialout group.
belong to the `dialout` group.

You may need to run something like

sudo usermod -a -G dialout username
```
sudo usermod -a -G dialout $USER
```

with your correct username and log out and log in again for that to take
effect.

If you get errors like:

```
./subsurface: error while loading shared libraries: libGrantlee_Templates.so.5: cannot open shared object file: No such file or directory
```

You can run the following command:

```
sudo ldconfig ~/src/install-root/lib
```

Building Subsurface under MacOSX
--------------------------------
### Building Subsurface under MacOSX

While it is possible to build all required components completely from source,
at this point the preferred way to build Subsurface is to set up the build
infrastructure via Homebrew and then build the dependencies from source.

0) You need to have XCode installed. The first time (and possibly after updating OSX)
0. You need to have XCode installed. The first time (and possibly after updating OSX)

```
xcode-select --install
```

1) install Homebrew (see https://brew.sh) and then the required build infrastructure:
1. install Homebrew (see https://brew.sh) and then the required build infrastructure:

```
brew install autoconf automake libtool pkg-config gettext
```

2) install Qt
2. install Qt

download the macOS installer from https://download.qt.io/official_releases/online_installers
and use it to install the desired Qt version. At this point the latest Qt5 version is still
preferred over Qt6.

3) now build Subsurface
If you plan to deploy your build to an Apple Silicon Mac, you may have better results with
Bluetooth connections if you install Qt5.15.13. If Qt5.15.13 is not available via the
installer, you can download from https://download.qt.io/official_releases/qt/5.15/5.15.13
and build using the usual configure, make, and make install.

3. now build Subsurface

```
cd ~/src; bash subsurface/scripts/build.sh -build-deps
```

if you are building against Qt6 (still experimental) you can create a universal binary with

```
cd ~/src; bash subsurface/scripts/build.sh -build-with-qt6 -build-deps -fat-build
```

After the above is done, Subsurface.app will be available in the
subsurface/build directory. You can run Subsurface with the command

A) open subsurface/build/Subsurface.app
A. `open subsurface/build/Subsurface.app`
this will however not show diagnostic output

B) subsurface/build/Subsurface.app/Contents/MacOS/Subsurface
the TAB key is your friend :-)
B. `subsurface/build/Subsurface.app/Contents/MacOS/Subsurface`
the [Tab] key is your friend :-)

Debugging can be done with either Xcode or QtCreator.

To install the app for all users, move subsurface/build/Subsurface.app to /Applications.

Cross-building Subsurface on MacOSX for iOS
-------------------------------------------
### Cross-building Subsurface on MacOSX for iOS

1) build SubSurface under MacOSX and iOS
0. build SubSurface under MacOSX and iOS

1.1) cd <repo>/..; bash <repo>/scripts/build.sh -build-deps -both
1. `cd <repo>/..; bash <repo>/scripts/build.sh -build-deps -both`
note: this is mainly done to ensure all external dependencies are downloaded and set
to the correct versions

2) continue as described in subsurface/packaging/ios
2. follow [these instructions](packaging/ios/README.md)

Cross-building Subsurface on Linux for Windows
----------------------------------------------
### Cross-building Subsurface on Linux for Windows

Subsurface builds nicely with MinGW - the official builds are done as
cross builds under Linux (currently on Ubuntu 20.04). A shell script to do
that (plus the .nsi file to create the installer with makensis) are
included in the packaging/windows directory.
Subsurface for Windows builds on linux by using the [MXE (M cross environment)](https://github.com/mxe/mxe). The easiest way to do this is to use a Docker container with a pre-built MXE for Subsurface by following [these instructions](packaging/windows/README.md).

Please read through the explanations and instructions in
packaging/windows/README.md, packaging/windows/create-win-installer.sh, and
packaging/windows/mxe-based-build.sh if you want to build the Windows version
on your Linux system.

Building Subsurface on Windows
------------------------------
### Building Subsurface on Windows

This is NOT RECOMMENDED. To the best of our knowledge there is one single
person who regularly does this. The Subsurface team does not provide support
@@ -381,8 +422,9 @@ The lack of a working package management system for Windows makes it
really painful to build Subsurface natively under Windows,
so we don't support that at all.

But if you want to build Subsurface on a Windows system, the docker based [cross-build for Windows](packaging/windows/README.md) works just fine in WSL2 on Windows.

Cross-building Subsurface on Linux for Android
----------------------------------------------

Follow the instructions in packaging/android/README
### Cross-building Subsurface on Linux for Android

Follow [these instructions](packaging/android/README.md).
46
README.md
@@ -1,20 +1,17 @@
# Subsurface

[](https://github.com/subsurface/subsurface/actions/workflows/windows.yml)
[](https://github.com/subsurface/subsurface/actions/workflows/mac.yml)
[](https://github.com/subsurface/subsurface/actions/workflows/ios.yml)
[](https://github.com/subsurface/subsurface/actions/workflows/android.yml)

[](https://github.com/subsurface/subsurface/actions/workflows/linux-snap.yml)
[](https://github.com/subsurface/subsurface/actions/workflows/linux-ubuntu-16.04-5.12-appimage.yml)
[](https://github.com/subsurface/subsurface/actions/workflows/linux-ubuntu-24.04-5.15.yml)
[](https://github.com/subsurface/subsurface/actions/workflows/linux-fedora-35-qt6.yml)
[](https://github.com/subsurface/subsurface/actions/workflows/linux-debian-trixie-5.15.yml)

This is the README file for Subsurface 5.0.10

Please check the `ReleaseNotes.txt` for details about new features and
changes since Subsurface 5.0.9 (and earlier versions).
[](https://scan.coverity.com/projects/subsurface-divelog-subsurface)

Subsurface can be found at http://subsurface-divelog.org

@@ -24,16 +21,9 @@ Report bugs and issues at https://github.com/Subsurface/subsurface/issues

License: GPLv2

We frequently make new test versions of Subsurface available at
http://subsurface-divelog.org/downloads/test/ and there you can always get
the latest builds for Mac, Windows, Linux AppImage and Android (with some
caveats about installability). Additionally, those same versions are
We are releasing 'nightly' builds of Subsurface that are built from the latest version of the code. Versions of this build for Windows, macOS, Android (requiring sideloading), and a Linux AppImage can be downloaded from the [Latest Dev Release](https://www.subsurface-divelog.org/latest-release/) page on [our website](https://www.subsurface-divelog.org/). Alternatively, they can be downloaded [directly from GitHub](https://github.com/subsurface/nightly-builds/releases). Additionally, those same versions are
posted to the Subsurface-daily repos on Ubuntu Launchpad, Fedora COPR, and
OpenSUSE OBS.

These tend to contain the latest bug fixes and features, but also
occasionally the latest bugs and issues. Please understand when using them
that these are primarily intended for testing.
OpenSUSE OBS, and released to [Snapcraft](https://snapcraft.io/subsurface) into the 'edge' channel of subsurface.

You can get the sources to the latest development version from the git
repository:
@@ -45,17 +35,11 @@ git clone https://github.com/Subsurface/subsurface.git
You can also fork the repository and browse the sources at the same site,
simply using https://github.com/Subsurface/subsurface

If you want the latest release (instead of the bleeding edge
development version) you can either get this via git or the release tar
ball. After cloning run the following command:
Additionally, artifacts for Windows, macOS, Android, Linux AppImage, and iOS (simulator build) are generated for all open pull requests and linked in pull request comments. Use these if you want to test the changes in a specific pull request and provide feedback before it has been merged.

```
git checkout v5.0.10 (or whatever the last release is)
```
If you want a more stable version that is a little bit more tested you can get this from the [Current Release](https://www.subsurface-divelog.org/current-release/) page on [our website](https://www.subsurface-divelog.org/).

or download a tarball from http://subsurface-divelog.org/downloads/Subsurface-5.0.10.tgz

Detailed build instructions can be found in the INSTALL file.
Detailed build instructions can be found in the [INSTALL.md](/INSTALL.md) file.

## System Requirements
@@ -25,7 +25,7 @@ SOURCES += subsurface-mobile-main.cpp \
core/devicedetails.cpp \
core/downloadfromdcthread.cpp \
core/qtserialbluetooth.cpp \
core/plannernotes.c \
core/plannernotes.cpp \
core/uemis-downloader.cpp \
core/qthelper.cpp \
core/checkcloudconnection.cpp \
@@ -33,7 +33,7 @@ SOURCES += subsurface-mobile-main.cpp \
core/configuredivecomputer.cpp \
core/divelogexportlogic.cpp \
core/divesitehelpers.cpp \
core/errorhelper.c \
core/errorhelper.cpp \
core/exif.cpp \
core/format.cpp \
core/gettextfromc.cpp \
@@ -44,8 +44,9 @@ SOURCES += subsurface-mobile-main.cpp \
core/file.cpp \
core/fulltext.cpp \
core/subsurfacestartup.cpp \
core/subsurface-string.cpp \
core/pref.c \
core/profile.c \
core/profile.cpp \
core/device.cpp \
core/dive.cpp \
core/divecomputer.c \
@@ -73,18 +74,18 @@ SOURCES += subsurface-mobile-main.cpp \
core/import-cobalt.cpp \
core/import-divinglog.cpp \
core/import-csv.cpp \
core/save-html.c \
core/save-html.cpp \
core/statistics.c \
core/worldmap-save.c \
core/worldmap-save.cpp \
core/libdivecomputer.cpp \
core/version.c \
core/save-git.cpp \
core/datatrak.cpp \
core/ostctools.c \
core/planner.c \
core/planner.cpp \
core/save-xml.cpp \
core/cochran.cpp \
core/deco.c \
core/deco.cpp \
core/divesite.c \
core/equipment.c \
core/gas.c \
@@ -93,7 +94,7 @@ SOURCES += subsurface-mobile-main.cpp \
core/sha1.c \
core/string-format.cpp \
core/strtod.c \
core/tag.c \
core/tag.cpp \
core/taxonomy.c \
core/time.cpp \
core/trip.c \
@@ -244,7 +245,6 @@ HEADERS += \
core/sample.h \
core/selection.h \
core/sha1.h \
core/strndup.h \
core/string-format.h \
core/subsurfacestartup.h \
core/subsurfacesysinfo.h \
@@ -181,7 +181,7 @@ void export_TeX(const char *filename, bool selected_only, bool plain, ExportCall
site ? put_format(&buf, "\\def\\%sgpslon{%f}\n", ssrf, site->location.lon.udeg / 1000000.0) : put_format(&buf, "\\def\\gpslon{}\n");
put_format(&buf, "\\def\\%scomputer{%s}\n", ssrf, dive->dc.model);
put_format(&buf, "\\def\\%scountry{%s}\n", ssrf, country ?: "");
put_format(&buf, "\\def\\%stime{%u:%02u}\n", ssrf, FRACTION(dive->duration.seconds, 60));
put_format(&buf, "\\def\\%stime{%u:%02u}\n", ssrf, FRACTION_TUPLE(dive->duration.seconds, 60));

put_format(&buf, "\n%% Dive Profile Details:\n");
dive->maxtemp.mkelvin ? put_format(&buf, "\\def\\%smaxtemp{%.1f\\%stemperatureunit}\n", ssrf, get_temp_units(dive->maxtemp.mkelvin, &unit), ssrf) : put_format(&buf, "\\def\\%smaxtemp{}\n", ssrf);
@@ -191,14 +191,8 @@ void export_TeX(const char *filename, bool selected_only, bool plain, ExportCall
dive->maxdepth.mm ? put_format(&buf, "\\def\\%smaximumdepth{%.1f\\%sdepthunit}\n", ssrf, get_depth_units(dive->maxdepth.mm, NULL, &unit), ssrf) : put_format(&buf, "\\def\\%smaximumdepth{}\n", ssrf);
dive->meandepth.mm ? put_format(&buf, "\\def\\%smeandepth{%.1f\\%sdepthunit}\n", ssrf, get_depth_units(dive->meandepth.mm, NULL, &unit), ssrf) : put_format(&buf, "\\def\\%smeandepth{}\n", ssrf);

struct tag_entry *tag = dive->tag_list;
QString tags;
if (tag) {
tags = tag->tag->name;
while ((tag = tag->next))
tags += QString(", ") + QString(tag->tag->name);
}
put_format(&buf, "\\def\\%stype{%s}\n", ssrf, qPrintable(tags));
std::string tags = taglist_get_tagstring(dive->tag_list);
put_format(&buf, "\\def\\%stype{%s}\n", ssrf, tags.c_str());
put_format(&buf, "\\def\\%sviz{%s}\n", ssrf, qPrintable(viz));
put_format(&buf, "\\def\\%srating{%s}\n", ssrf, qPrintable(rating));
put_format(&buf, "\\def\\%splot{\\includegraphics[width=9cm,height=4cm]{profile%d}}\n", ssrf, dive->number);
@@ -1,20 +1,20 @@
execute_process(
COMMAND bash ${CMAKE_TOP_SRC_DIR}/scripts/get-version 4
COMMAND bash ${CMAKE_TOP_SRC_DIR}/scripts/get-version.sh 4
WORKING_DIRECTORY ${CMAKE_TOP_SRC_DIR}
OUTPUT_VARIABLE CANONICAL_VERSION_STRING_4
OUTPUT_STRIP_TRAILING_WHITESPACE
)

execute_process(
COMMAND bash ${CMAKE_TOP_SRC_DIR}/scripts/get-version 3
COMMAND bash ${CMAKE_TOP_SRC_DIR}/scripts/get-version.sh 3
WORKING_DIRECTORY ${CMAKE_TOP_SRC_DIR}
OUTPUT_VARIABLE CANONICAL_VERSION_STRING_3
OUTPUT_STRIP_TRAILING_WHITESPACE
)

execute_process(
COMMAND bash ${CMAKE_TOP_SRC_DIR}/scripts/get-version
COMMAND bash ${CMAKE_TOP_SRC_DIR}/scripts/get-version.sh
WORKING_DIRECTORY ${CMAKE_TOP_SRC_DIR}
OUTPUT_VARIABLE CANONICAL_VERSION_STRING
OUTPUT_STRIP_TRAILING_WHITESPACE
@@ -100,6 +100,7 @@ enum class EditProfileType {
ADD,
REMOVE,
MOVE,
EDIT,
};
void replanDive(dive *d); // dive computer(s) and cylinder(s) of first argument will be consumed!
void editProfile(const dive *d, int dcNr, EditProfileType type, int count);

@@ -521,6 +521,11 @@ ImportDives::ImportDives(struct divelog *log, int flags, const QString &source)
continue;
filterPresetsToAdd.emplace_back(preset.name, preset.data);
}

free(dives_to_add.dives);
free(dives_to_remove.dives);
free(trips_to_add.trips);
free(sites_to_add.dive_sites);
}

bool ImportDives::workToBeDone()

@@ -566,7 +566,7 @@ QStringList EditTags::data(struct dive *d) const
{
QStringList res;
for (const struct tag_entry *tag = d->tag_list; tag; tag = tag->next)
res.push_back(tag->tag->name);
res.push_back(QString::fromStdString(tag->tag->name));
return res;
}

@@ -879,6 +879,7 @@ QString editProfileTypeToString(EditProfileType type, int count)
case EditProfileType::ADD: return Command::Base::tr("Add stop");
case EditProfileType::REMOVE: return Command::Base::tr("Remove %n stop(s)", "", count);
case EditProfileType::MOVE: return Command::Base::tr("Move %n stop(s)", "", count);
case EditProfileType::EDIT: return Command::Base::tr("Edit stop");
}
}

@@ -904,7 +905,7 @@ EditProfile::EditProfile(const dive *source, int dcNr, EditProfileType type, int
copy_samples(sdc, &dc);
copy_events(sdc, &dc);

setText(editProfileTypeToString(type, count) + diveNumberOrDate(d));
setText(editProfileTypeToString(type, count) + " " + diveNumberOrDate(d));
}

EditProfile::~EditProfile()
@@ -925,6 +926,7 @@ void EditProfile::undo()
std::swap(sdc->samples, dc.samples);
std::swap(sdc->alloc_samples, dc.alloc_samples);
std::swap(sdc->sample, dc.sample);
std::swap(sdc->events, dc.events);
std::swap(sdc->maxdepth, dc.maxdepth);
std::swap(d->maxdepth, maxdepth);
std::swap(d->meandepth, meandepth);
@@ -1125,7 +1127,7 @@ AddCylinder::AddCylinder(bool currentDiveOnly) :
setText(Command::Base::tr("Add cylinder"));
else
setText(Command::Base::tr("Add cylinder (%n dive(s))", "", dives.size()));
cyl = create_new_cylinder(dives[0]);
cyl = create_new_manual_cylinder(dives[0]);
indexes.reserve(dives.size());
}

@@ -1317,8 +1319,7 @@ EditCylinder::EditCylinder(int index, cylinder_t cylIn, EditCylinderType typeIn,
void EditCylinder::redo()
{
for (size_t i = 0; i < dives.size(); ++i) {
set_tank_info_size(&tank_info_table, cyl[i].type.description, cyl[i].type.size);
set_tank_info_workingpressure(&tank_info_table, cyl[i].type.description, cyl[i].type.workingpressure);
set_tank_info_data(&tank_info_table, cyl[i].type.description, cyl[i].type.size, cyl[i].type.workingpressure);
std::swap(*get_cylinder(dives[i], indexes[i]), cyl[i]);
update_cylinder_related_info(dives[i]);
emit diveListNotifier.cylinderEdited(dives[i], indexes[i]);
@@ -1426,7 +1427,7 @@ EditDive::EditDive(dive *oldDiveIn, dive *newDiveIn, dive_site *createDs, dive_s
changedFields |= DiveField::CHILL;
if (!same_string(oldDive->suit, newDive->suit))
changedFields |= DiveField::SUIT;
if (get_taglist_string(oldDive->tag_list) != get_taglist_string(newDive->tag_list)) // This is cheating. Do we have a taglist comparison function?
if (taglist_get_tagstring(oldDive->tag_list) != taglist_get_tagstring(newDive->tag_list)) // This is cheating. Do we have a taglist comparison function?
changedFields |= DiveField::TAGS;
if (oldDive->dc.divemode != newDive->dc.divemode)
changedFields |= DiveField::MODE;
@@ -1,6 +1,7 @@
// SPDX-License-Identifier: GPL-2.0

#include "command_pictures.h"
#include "core/errorhelper.h"
#include "core/subsurface-qt/divelistnotifier.h"
#include "qt-models/divelocationmodel.h"

@@ -24,7 +25,7 @@ void SetPictureOffset::redo()
{
picture *pic = dive_get_picture(d, filename);
if (!pic) {
fprintf(stderr, "SetPictureOffset::redo(): picture disappeared!\n");
report_info("SetPictureOffset::redo(): picture disappeared!");
return;
}
std::swap(pic->offset, offset);
@@ -73,7 +74,7 @@ static std::vector<PictureListForAddition> removePictures(std::vector<PictureLis
for (const std::string &fn: list.filenames) {
int idx = get_picture_idx(&list.d->pictures, fn.c_str());
if (idx < 0) {
fprintf(stderr, "removePictures(): picture disappeared!\n");
report_info("removePictures(): picture disappeared!");
continue; // Huh? We made sure that this can't happen by filtering out non-existent pictures.
}
filenames.push_back(QString::fromStdString(fn));
@@ -103,7 +104,7 @@ static std::vector<PictureListForDeletion> addPictures(std::vector<PictureListFo
for (const PictureObj &pic: list.pics) {
int idx = get_picture_idx(&list.d->pictures, pic.filename.c_str()); // This should *not* already exist!
if (idx >= 0) {
fprintf(stderr, "addPictures(): picture disappeared!\n");
report_info("addPictures(): picture disappeared!");
continue; // Huh? We made sure that this can't happen by filtering out existing pictures.
}
picsForSignal.push_back(pic);
@@ -53,7 +53,7 @@ set(SUBSURFACE_CORE_LIB_SRCS
connectionlistmodel.h
datatrak.cpp
datatrak.h
deco.c
deco.cpp
deco.h
device.cpp
device.h
@@ -85,7 +85,7 @@ set(SUBSURFACE_CORE_LIB_SRCS
eventtype.h
equipment.c
equipment.h
errorhelper.c
errorhelper.cpp
exif.cpp
exif.h
extradata.h
@@ -141,12 +141,12 @@ set(SUBSURFACE_CORE_LIB_SRCS
picture.h
pictureobj.cpp
pictureobj.h
planner.c
planner.cpp
planner.h
plannernotes.c
plannernotes.cpp
pref.h
pref.c
profile.c
profile.cpp
profile.h
qt-gui.h
qt-init.cpp
@@ -156,7 +156,7 @@ set(SUBSURFACE_CORE_LIB_SRCS
sample.cpp
sample.h
save-git.cpp
save-html.c
save-html.cpp
save-html.h
save-profiledata.c
save-xml.cpp
@@ -167,17 +167,17 @@ set(SUBSURFACE_CORE_LIB_SRCS
ssrf.h
statistics.c
statistics.h
strndup.h
string-format.h
string-format.cpp
strtod.c
subsurface-float.h
subsurface-string.cpp
subsurface-string.h
subsurfacestartup.cpp
subsurfacestartup.h
subsurfacesysinfo.cpp
subsurfacesysinfo.h
tag.c
tag.cpp
tag.h
taxonomy.c
taxonomy.h
@@ -203,7 +203,7 @@ set(SUBSURFACE_CORE_LIB_SRCS
windowtitleupdate.cpp
windowtitleupdate.h
worldmap-options.h
worldmap-save.c
worldmap-save.cpp
worldmap-save.h
xmlparams.cpp
xmlparams.h
@ -2,9 +2,9 @@
|
||||
|
||||
#include "btdiscovery.h"
|
||||
#include "downloadfromdcthread.h"
|
||||
#include "core/libdivecomputer.h"
|
||||
#include "libdivecomputer.h"
|
||||
#include "errorhelper.h"
|
||||
#include <QTimer>
|
||||
#include <QDebug>
|
||||
#include <QLoggingCategory>
|
||||
#include <QRegularExpression>
|
||||
#include <QElapsedTimer>
|
||||
@ -177,7 +177,7 @@ BTDiscovery::BTDiscovery(QObject*) : m_btValid(false),
|
||||
discoveryAgent(nullptr)
|
||||
{
|
||||
if (m_instance) {
|
||||
qDebug() << "trying to create an additional BTDiscovery object";
|
||||
report_info("trying to create an additional BTDiscovery object");
|
||||
return;
|
||||
}
|
||||
m_instance = this;
|
||||
@ -195,11 +195,11 @@ void BTDiscovery::showNonDiveComputers(bool show)
|
||||
void BTDiscovery::BTDiscoveryReDiscover()
|
||||
{
|
||||
#if !defined(Q_OS_IOS)
|
||||
qDebug() << "BTDiscoveryReDiscover: localBtDevice.isValid()" << localBtDevice.isValid();
|
||||
report_info("BTDiscoveryReDiscover: localBtDevice.isValid() %d", localBtDevice.isValid());
|
||||
if (localBtDevice.isValid() &&
|
||||
localBtDevice.hostMode() != QBluetoothLocalDevice::HostPoweredOff) {
|
||||
btPairedDevices.clear();
|
||||
qDebug() << "BTDiscoveryReDiscover: localDevice " + localBtDevice.name() + " is powered on, starting discovery";
|
||||
report_info("BTDiscoveryReDiscover: localDevice %s is powered on, starting discovery", qPrintable(localBtDevice.name()));
|
||||
#else
|
||||
// for iOS we can't use the localBtDevice as iOS is BLE only
|
||||
// we need to find some other way to test if Bluetooth is enabled, though
|
||||
@ -220,13 +220,13 @@ void BTDiscovery::BTDiscoveryReDiscover()
|
||||
connect(discoveryAgent, QOverload<QBluetoothDeviceDiscoveryAgent::Error>::of(&QBluetoothDeviceDiscoveryAgent::error),
|
||||
#endif
|
||||
[this](QBluetoothDeviceDiscoveryAgent::Error error){
|
||||
qDebug() << "device discovery received error" << discoveryAgent->errorString();
|
||||
report_info("device discovery received error %s", qPrintable(discoveryAgent->errorString()));
|
||||
});
|
||||
qDebug() << "discovery methods" << (int)QBluetoothDeviceDiscoveryAgent::supportedDiscoveryMethods();
|
||||
report_info("discovery methods %d", (int)QBluetoothDeviceDiscoveryAgent::supportedDiscoveryMethods());
|
||||
}
|
||||
#if defined(Q_OS_ANDROID)
|
||||
// on Android, we cannot scan for classic devices - we just get the paired ones
|
||||
qDebug() << "starting BLE discovery";
|
||||
report_info("starting BLE discovery");
|
||||
discoveryAgent->start(QBluetoothDeviceDiscoveryAgent::LowEnergyMethod);
|
||||
getBluetoothDevices();
|
||||
// and add the paired devices to the internal data
|
||||
@ -235,10 +235,10 @@ void BTDiscovery::BTDiscoveryReDiscover()
|
||||
for (int i = 0; i < btPairedDevices.length(); i++)
|
||||
btDeviceDiscoveredMain(btPairedDevices[i], true);
|
||||
#else
|
||||
qDebug() << "starting BT/BLE discovery";
|
||||
report_info("starting BT/BLE discovery");
|
||||
discoveryAgent->start();
|
||||
for (int i = 0; i < btPairedDevices.length(); i++)
|
||||
qDebug() << "Paired =" << btPairedDevices[i].name << btPairedDevices[i].address;
|
||||
report_info("Paired = %s %s", qPrintable( btPairedDevices[i].name), qPrintable(btPairedDevices[i].address));
|
||||
#endif
|
||||
|
||||
#if defined(Q_OS_IOS) || (defined(Q_OS_LINUX) && !defined(Q_OS_ANDROID))
|
||||
@ -248,7 +248,7 @@ void BTDiscovery::BTDiscoveryReDiscover()
|
||||
timer.start(3000);
|
||||
#endif
|
||||
} else {
|
||||
qDebug() << "localBtDevice isn't valid or not connectable";
|
||||
report_info("localBtDevice isn't valid or not connectable");
|
||||
m_btValid = false;
|
||||
}
|
||||
}
|
||||
@ -291,10 +291,10 @@ QString markBLEAddress(const QBluetoothDeviceInfo *device)
|
||||
|
||||
void BTDiscovery::btDeviceDiscoveryFinished()
|
||||
{
|
||||
qDebug() << "BT/BLE finished discovery";
|
||||
report_info("BT/BLE finished discovery");
|
||||
QList<QBluetoothDeviceInfo> devList = discoveryAgent->discoveredDevices();
|
||||
for (QBluetoothDeviceInfo device: devList) {
|
||||
qDebug() << device.name() << device.address().toString();
|
||||
report_info("%s %s", qPrintable(device.name()), qPrintable(device.address().toString()));
|
||||
}
|
||||
}
|
||||
|
||||
@ -308,7 +308,7 @@ void BTDiscovery::btDeviceDiscovered(const QBluetoothDeviceInfo &device)
|
||||
const auto serviceUuids = device.serviceUuids();
|
||||
for (QBluetoothUuid id: serviceUuids) {
|
||||
addBtUuid(id);
|
||||
qDebug() << id.toByteArray();
|
||||
report_info("%s", qPrintable(id.toByteArray()));
|
||||
}
|
||||
|
||||
#if defined(Q_OS_IOS) || defined(Q_OS_MACOS) || defined(Q_OS_WIN)
|
||||
@ -337,7 +337,7 @@ void BTDiscovery::btDeviceDiscoveredMain(const btPairedDevice &device, bool from
|
||||
msg = QString("%1 device: '%2' [%3]: ").arg(fromPaired ? "Paired" : "Discovered new").arg(newDevice).arg(device.address);
|
||||
if (newDC) {
|
||||
QString vendor = dc_descriptor_get_vendor(newDC);
|
||||
qDebug() << msg << "this could be a " + vendor;
|
||||
report_info("%s this could be a %s", qPrintable(msg), qPrintable(vendor));
|
||||
btVP.btpdi = device;
|
||||
btVP.dcDescriptor = newDC;
|
||||
btVP.vendorIdx = vendorList.indexOf(vendor);
|
||||
@ -352,7 +352,7 @@ void BTDiscovery::btDeviceDiscoveredMain(const btPairedDevice &device, bool from
|
||||
newDevice += " ";
|
||||
connectionListModel.addAddress(newDevice + device.address);
|
||||
}
|
||||
qDebug() << msg << "not recognized as dive computer";
|
||||
report_info("%s not recognized as dive computer", qPrintable(msg));
|
||||
}
|
||||
|
||||
QList<BTDiscovery::btVendorProduct> BTDiscovery::getBtDcs()
|
||||
@ -407,12 +407,12 @@ void BTDiscovery::getBluetoothDevices()
|
||||
result.address = dev.callObjectMethod("getAddress","()Ljava/lang/String;").toString();
|
||||
result.name = dev.callObjectMethod("getName", "()Ljava/lang/String;").toString();
|
||||
if (btType & 1) { // DEVICE_TYPE_CLASSIC
|
||||
qDebug() << "paired BT classic device type" << btType << "with address" << result.address;
|
||||
report_info("paired BT classic device type %d with address %s", btType, qPrintable(result.address));
|
||||
btPairedDevices.append(result);
|
||||
}
|
||||
if (btType & 2) { // DEVICE_TYPE_LE
|
||||
result.address = QString("LE:%1").arg(result.address);
|
||||
qDebug() << "paired BLE device type" << btType << "with address" << result.address;
|
||||
report_info("paired BLE device type %d with address %s", btType, qPrintable(result.address));
|
||||
btPairedDevices.append(result);
|
||||
}
|
||||
}
|
||||
@ -451,7 +451,7 @@ void BTDiscovery::discoverAddress(QString address)
|
||||
btAddress = extractBluetoothAddress(address);
|
||||
|
||||
if (!btDeviceInfo.keys().contains(address) && !discoveryAgent->isActive()) {
|
||||
qDebug() << "restarting discovery agent";
|
||||
report_info("restarting discovery agent");
|
||||
discoveryAgent->start();
|
||||
}
|
||||
}
|
||||
@ -460,7 +460,7 @@ void BTDiscovery::stopAgent()
|
||||
{
|
||||
if (!discoveryAgent)
|
||||
return;
|
||||
qDebug() << "---> stopping the discovery agent";
|
||||
report_info("---> stopping the discovery agent");
|
||||
discoveryAgent->stop();
|
||||
}
|
||||
|
||||
@ -491,7 +491,7 @@ QString extractBluetoothNameAddress(const QString &address, QString &name)
|
||||
name = m.captured(1).trimmed();
|
||||
return extractedAddress;
|
||||
}
|
||||
qDebug() << "can't parse address" << address;
|
||||
report_info("can't parse address %s", qPrintable(address));
|
||||
return QString();
|
||||
}
|
||||
|
||||
@ -507,7 +507,7 @@ QBluetoothDeviceInfo getBtDeviceInfo(const QString &devaddr)
|
||||
return btDeviceInfo[devaddr];
|
||||
}
|
||||
if(!btDeviceInfo.keys().contains(devaddr)) {
|
||||
qDebug() << "still looking scan is still running, we should just wait for a few moments";
|
||||
report_info("still looking scan is still running, we should just wait for a few moments");
|
||||
// wait for a maximum of 30 more seconds
|
||||
// yes, that seems crazy, but on my Mac I see this take more than 20 seconds
|
||||
QElapsedTimer timer;
|
||||
@ -521,7 +521,7 @@ QBluetoothDeviceInfo getBtDeviceInfo(const QString &devaddr)
|
||||
QThread::msleep(100);
|
||||
} while (timer.elapsed() < 30000);
|
||||
}
|
||||
qDebug() << "notify user that we can't find" << devaddr;
|
||||
report_info("notify user that we can't find %s", qPrintable(devaddr));
|
||||
return QBluetoothDeviceInfo();
|
||||
}
|
||||
#endif // BT_SUPPORT
|
||||
|
||||
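The pattern throughout this change set replaces qDebug()/fprintf(stderr, ...) logging with the printf-style report_info() helper from errorhelper.h, converting QString arguments with qPrintable(). A minimal sketch of the conversion, assuming only the report_info() usage shown in this diff; the function and variables below are hypothetical:

#include "core/errorhelper.h"   // declares report_info(const char *fmt, ...)
#include <QString>

static void logDeviceExample(const QString &name, int type)
{
	// old style: qDebug() << "device" << name << "of type" << type;
	// new style: printf-like formatting, QString converted via qPrintable()
	report_info("device %s of type %d", qPrintable(name), type);
}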
@@ -10,6 +10,7 @@
#include "qthelper.h"
#include "git-access.h"
#include "errorhelper.h"
#include "core/format.h"
#include "core/subsurface-string.h"
#include "core/membuffer.h"
#include "core/settings/qPrefCloudStorage.h"
@@ -34,7 +35,7 @@ CheckCloudConnection::CheckCloudConnection(QObject *parent) :
bool CheckCloudConnection::checkServer()
{
if (verbose)
fprintf(stderr, "Checking cloud connection...\n");
report_info("Checking cloud connection...");

QEventLoop loop;
QNetworkAccessManager *mgr = new QNetworkAccessManager();
@@ -72,10 +73,10 @@ bool CheckCloudConnection::checkServer()
}
}
if (verbose)
qDebug() << "connection test to cloud server" << prefs.cloud_base_url << "failed" <<
reply->error() << reply->errorString() <<
reply->attribute(QNetworkRequest::HttpStatusCodeAttribute).toInt() <<
reply->readAll();
report_info("connection test to cloud server %s failed %d %s %d %s", prefs.cloud_base_url,
static_cast<int>(reply->error()), qPrintable(reply->errorString()),
reply->attribute(QNetworkRequest::HttpStatusCodeAttribute).toInt(),
qPrintable(reply->readAll()));
} while (nextServer());
// if none of the servers was reachable, update the user and switch to git_local_only
git_storage_update_progress(qPrintable(tr("Cloud connection failed")));
@@ -89,9 +90,9 @@ bool CheckCloudConnection::checkServer()

void CheckCloudConnection::sslErrors(const QList<QSslError> &errorList)
{
qDebug() << "Received error response trying to set up https connection with cloud storage backend:";
report_info("Received error response trying to set up https connection with cloud storage backend:");
for (QSslError err: errorList)
qDebug() << err.errorString();
report_info("%s", qPrintable(err.errorString()));
}

bool CheckCloudConnection::nextServer()
@@ -119,12 +120,12 @@ bool CheckCloudConnection::nextServer()
strcpy(baseurl, "https://");
strncat(baseurl, server, s);
strcat(baseurl, "/");
qDebug() << "failed to connect to" << prefs.cloud_base_url << "next server to try: " << baseurl;
report_info("failed to connect to %s next server to try: %s", prefs.cloud_base_url, baseurl);
prefs.cloud_base_url = baseurl;
git_storage_update_progress(qPrintable(tr("Trying different cloud server...")));
return true;
}
qDebug() << "failed to connect to any of the Subsurface cloud servers, giving up";
report_info("failed to connect to any of the Subsurface cloud servers, giving up");
return false;
}

@@ -143,7 +144,7 @@ void CheckCloudConnection::gotIP(QNetworkReply *reply)
if (reply->error() != QNetworkReply::NoError) {
// whatever, just use the default host
if (verbose)
qDebug() << __FUNCTION__ << "got error reply from ip webservice - not changing cloud host";
report_info("%s got error reply from ip webservice - not changing cloud host", __func__);
return;
}
QString addressString = reply->readAll();
@@ -153,11 +154,11 @@ void CheckCloudConnection::gotIP(QNetworkReply *reply)
if (addr.isNull()) {
// this isn't an address, don't try to update the cloud host
if (verbose)
qDebug() << __FUNCTION__ << "returned address doesn't appear to be valid (" << addressString << ") - not changing cloud host";
report_info("%s returned address doesn't appear to be valid (%s) - not changing cloud host", __func__, qPrintable(addressString));
return;
}
if (verbose)
qDebug() << "IP used for cloud server access" << addressString;
report_info("IP used for cloud server access %s", qPrintable(addressString));
// now figure out which continent we are on
QNetworkRequest request(QString(GET_CONTINENT_API).arg(addressString));
request.setRawHeader("Accept", "text/plain");
@@ -172,7 +173,7 @@ void CheckCloudConnection::gotContinent(QNetworkReply *reply)
if (reply->error() != QNetworkReply::NoError) {
// whatever, just use the default host
if (verbose)
qDebug() << __FUNCTION__ << "got error reply from ip location webservice - not changing cloud host";
report_info("%s got error reply from ip location webservice - not changing cloud host", __func__);
return;
}
QString continentString = reply->readAll();
@@ -193,7 +194,7 @@ void CheckCloudConnection::gotContinent(QNetworkReply *reply)
base_url = "https://" CLOUD_HOST_EU "/";
if (!same_string(base_url, prefs.cloud_base_url)) {
if (verbose)
qDebug() << "remember cloud server" << base_url << "based on IP location in " << continentString;
report_info("remember cloud server %s based on IP location in %s", base_url, qPrintable(continentString));
qPrefCloudStorage::instance()->store_cloud_base_url(base_url);
}
}
@@ -202,17 +203,18 @@ void CheckCloudConnection::gotContinent(QNetworkReply *reply)
extern "C" bool canReachCloudServer(struct git_info *info)
{
if (verbose)
qWarning() << "Cloud storage: checking connection to cloud server" << info->url;
qWarning() << "Cloud storage: checking connection to cloud server" << info->url.c_str();
bool connection = CheckCloudConnection().checkServer();
if (strstr(info->url, prefs.cloud_base_url) == nullptr) {
if (info->url.find(prefs.cloud_base_url) == std::string::npos) {
// we switched the cloud URL - likely because we couldn't reach the server passed in
// the strstr with the offset is designed so we match the right component in the name;
// the cloud_base_url ends with a '/', so we need the text starting at "git/..."
char *newremote = format_string("%s%s", prefs.cloud_base_url, strstr(info->url, "org/git/") + 4);
if (verbose)
qDebug() << "updating remote to: " << newremote;
free((void*)info->url);
info->url = newremote;
size_t pos = info->url.find("org/git/");
if (pos != std::string::npos) {
info->url = format_string_std("%s%s", prefs.cloud_base_url, info->url.c_str() + pos + 4);
if (verbose)
report_info("updating remote to: %s", info->url.c_str());
}
}
return connection;
}
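canReachCloudServer() now treats info->url as a std::string and rebuilds the remote with format_string_std() instead of strstr()/format_string() on a raw char *. A sketch of that rewrite step in isolation, assuming the format_string_std() helper seen above; the function and variable names here are illustrative:

#include <string>

// stand-in declaration for the format_string_std() helper used in the diff
extern std::string format_string_std(const char *fmt, ...);

static void switch_remote(std::string &url, const char *cloud_base_url)
{
	// cloud_base_url ends with '/', so keep the part of the old URL starting at "git/..."
	size_t pos = url.find("org/git/");
	if (pos != std::string::npos)
		url = format_string_std("%s%s", cloud_base_url, url.c_str() + pos + 4);
}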
@@ -74,31 +74,30 @@ QNetworkReply* CloudStorageAuthenticate::deleteAccount(const QString& email, con

void CloudStorageAuthenticate::deleteFinished()
{
QString cloudAuthReply(reply->readAll());
qDebug() << "Completed connection with cloud storage backend, response" << cloudAuthReply;
std::string cloudAuthReply = reply->readAll().toStdString();
report_info("Completed connection with cloud storage backend, response %s", cloudAuthReply.c_str());
emit finishedDelete();
}

void CloudStorageAuthenticate::uploadFinished()
{
static QString myLastError;
static std::string myLastError;

QString cloudAuthReply(reply->readAll());
qDebug() << "Completed connection with cloud storage backend, response" << cloudAuthReply;
std::string cloudAuthReply = reply->readAll().toStdString();
report_info("Completed connection with cloud storage backend, response %s", cloudAuthReply.c_str());

if (cloudAuthReply == QLatin1String("[VERIFIED]") || cloudAuthReply == QLatin1String("[OK]")) {
if (cloudAuthReply == "[VERIFIED]" || cloudAuthReply == "[OK]") {
qPrefCloudStorage::set_cloud_verification_status(qPrefCloudStorage::CS_VERIFIED);
/* TODO: Move this to a correct place
NotificationWidget *nw = MainWindow::instance()->getNotificationWidget();
if (nw->getNotificationText() == myLastError)
if (nw->getNotificationText().toStdString() == myLastError)
nw->hideNotification();
*/
myLastError.clear();
} else if (cloudAuthReply == QLatin1String("[VERIFY]") ||
cloudAuthReply == QLatin1String("Invalid PIN")) {
} else if (cloudAuthReply == "[VERIFY]" || cloudAuthReply == "Invalid PIN") {
qPrefCloudStorage::set_cloud_verification_status(qPrefCloudStorage::CS_NEED_TO_VERIFY);
report_error("%s", qPrintable(tr("Cloud account verification required, enter PIN in preferences")));
} else if (cloudAuthReply == QLatin1String("[PASSWDCHANGED]")) {
} else if (cloudAuthReply == "[PASSWDCHANGED]") {
qPrefCloudStorage::set_cloud_storage_password(cloudNewPassword);
cloudNewPassword.clear();
emit passwordChangeSuccessful();
@@ -106,28 +105,28 @@ void CloudStorageAuthenticate::uploadFinished()
} else {
qPrefCloudStorage::set_cloud_verification_status(qPrefCloudStorage::CS_INCORRECT_USER_PASSWD);
myLastError = cloudAuthReply;
report_error("%s", qPrintable(cloudAuthReply));
report_error("%s", cloudAuthReply.c_str());
}
emit finishedAuthenticate();
}

void CloudStorageAuthenticate::uploadError(QNetworkReply::NetworkError)
{
qDebug() << "Received error response from cloud storage backend:" << reply->errorString();
report_info("Received error response from cloud storage backend: %s", qPrintable(reply->errorString()));
}

void CloudStorageAuthenticate::sslErrors(const QList<QSslError> &errorList)
{
if (verbose) {
qDebug() << "Received error response trying to set up https connection with cloud storage backend:";
report_info("Received error response trying to set up https connection with cloud storage backend:");
for (QSslError err: errorList) {
qDebug() << err.errorString();
report_info("%s", qPrintable(err.errorString()));
}
}
QSslConfiguration conf = reply->sslConfiguration();
QSslCertificate cert = conf.peerCertificate();
QByteArray hexDigest = cert.digest().toHex();
qDebug() << "got invalid SSL certificate with hex digest" << hexDigest;
report_info("got invalid SSL certificate with hex digest %s", qPrintable(hexDigest));
}

QNetworkAccessManager *manager()
@@ -721,7 +721,6 @@ int datatrak_import(std::string &mem, std::string &wl_mem, struct divelog *log)
i++;
}
out:
taglist_cleanup(&g_tag_list);
sort_dive_table(log->dives);
return rc;
bail:
@@ -13,8 +13,6 @@
* set_gf() - set Buehlmann gradient factors
* set_vpmb_conservatism() - set VPM-B conservatism value
* clear_deco()
* cache_deco_state()
* restore_deco_state()
* dump_tissues()
*/
#include <stdlib.h>
@@ -218,7 +216,7 @@ static double vpmb_tolerated_ambient_pressure(struct deco_state *ds, double refe
return ds->tissue_n2_sat[ci] + ds->tissue_he_sat[ci] + vpmb_config.other_gases_pressure - total_gradient;
}

double tissue_tolerance_calc(struct deco_state *ds, const struct dive *dive, double pressure, bool in_planner)
extern "C" double tissue_tolerance_calc(struct deco_state *ds, const struct dive *dive, double pressure, bool in_planner)
{
int ci = -1;
double ret_tolerance_limit_ambient_pressure = 0.0;
@@ -325,7 +323,7 @@ static double calc_surface_phase(double surface_pressure, double he_pressure, do
return 0;
}

void vpmb_start_gradient(struct deco_state *ds)
extern "C" void vpmb_start_gradient(struct deco_state *ds)
{
int ci;

@@ -335,7 +333,7 @@ void vpmb_start_gradient(struct deco_state *ds)
}
}

void vpmb_next_gradient(struct deco_state *ds, double deco_time, double surface_pressure, bool in_planner)
extern "C" void vpmb_next_gradient(struct deco_state *ds, double deco_time, double surface_pressure, bool in_planner)
{
int ci;
double n2_b, n2_c;
@@ -381,7 +379,7 @@ static double solve_cubic(double A, double B, double C)
}


void nuclear_regeneration(struct deco_state *ds, double time)
extern "C" void nuclear_regeneration(struct deco_state *ds, double time)
{
time /= 60.0;
int ci;
@@ -413,7 +411,7 @@ static double calc_inner_pressure(double crit_radius, double onset_tension, doub
}

// Calculates the crushing pressure in the given moment. Updates crushing_onset_tension and critical radius if needed
void calc_crushing_pressure(struct deco_state *ds, double pressure)
extern "C" void calc_crushing_pressure(struct deco_state *ds, double pressure)
{
int ci;
double gradient;
@@ -438,16 +436,15 @@ void calc_crushing_pressure(struct deco_state *ds, double pressure)
n2_crushing_pressure = pressure - n2_inner_pressure;
he_crushing_pressure = pressure - he_inner_pressure;
}
ds->max_n2_crushing_pressure[ci] = MAX(ds->max_n2_crushing_pressure[ci], n2_crushing_pressure);
ds->max_he_crushing_pressure[ci] = MAX(ds->max_he_crushing_pressure[ci], he_crushing_pressure);
ds->max_n2_crushing_pressure[ci] = std::max(ds->max_n2_crushing_pressure[ci], n2_crushing_pressure);
ds->max_he_crushing_pressure[ci] = std::max(ds->max_he_crushing_pressure[ci], he_crushing_pressure);
}
ds->max_ambient_pressure = MAX(pressure, ds->max_ambient_pressure);
ds->max_ambient_pressure = std::max(pressure, ds->max_ambient_pressure);
}

/* add period_in_seconds at the given pressure and gas to the deco calculation */
void add_segment(struct deco_state *ds, double pressure, struct gasmix gasmix, int period_in_seconds, int ccpo2, enum divemode_t divemode, int sac, bool in_planner)
extern "C" void add_segment(struct deco_state *ds, double pressure, struct gasmix gasmix, int period_in_seconds, int ccpo2, enum divemode_t divemode, int, bool in_planner)
{
UNUSED(sac);
int ci;
struct gas_pressures pressures;
bool icd = false;
@@ -479,7 +476,7 @@ void add_segment(struct deco_state *ds, double pressure, struct gasmix gasmix, i
}

#if DECO_CALC_DEBUG
void dump_tissues(struct deco_state *ds)
extern "C" void dump_tissues(struct deco_state *ds)
{
int ci;
printf("N2 tissues:");
@@ -492,7 +489,7 @@ void dump_tissues(struct deco_state *ds)
}
#endif

void clear_vpmb_state(struct deco_state *ds)
extern "C" void clear_vpmb_state(struct deco_state *ds)
{
int ci;
for (ci = 0; ci < 16; ci++) {
@@ -504,7 +501,7 @@ void clear_vpmb_state(struct deco_state *ds)
ds->max_bottom_ceiling_pressure.mbar = 0;
}

void clear_deco(struct deco_state *ds, double surface_pressure, bool in_planner)
extern "C" void clear_deco(struct deco_state *ds, double surface_pressure, bool in_planner)
{
int ci;

@@ -523,19 +520,17 @@ void clear_deco(struct deco_state *ds, double surface_pressure, bool in_planner)
ds->ci_pointing_to_guiding_tissue = -1;
}

void cache_deco_state(struct deco_state *src, struct deco_state **cached_datap)
void deco_state_cache::cache(const struct deco_state *src)
{
struct deco_state *data = *cached_datap;

if (!data) {
data = malloc(sizeof(struct deco_state));
*cached_datap = data;
}
if (!data)
data = std::make_unique<deco_state>();
*data = *src;
}

void restore_deco_state(struct deco_state *data, struct deco_state *target, bool keep_vpmb_state)
void deco_state_cache::restore(struct deco_state *target, bool keep_vpmb_state) const
{
if (!data)
return;
if (keep_vpmb_state) {
int ci;
for (ci = 0; ci < 16; ci++) {
@@ -548,10 +543,9 @@ void restore_deco_state(struct deco_state *data, struct deco_state *target, bool
data->max_bottom_ceiling_pressure = target->max_bottom_ceiling_pressure;
}
*target = *data;

}

int deco_allowed_depth(double tissues_tolerance, double surface_pressure, const struct dive *dive, bool smooth)
extern "C" int deco_allowed_depth(double tissues_tolerance, double surface_pressure, const struct dive *dive, bool smooth)
{
int depth;
double pressure_delta;
@@ -570,7 +564,7 @@ int deco_allowed_depth(double tissues_tolerance, double surface_pressure, const
return depth;
}

void set_gf(short gflow, short gfhigh)
extern "C" void set_gf(short gflow, short gfhigh)
{
if (gflow != -1)
buehlmann_config.gf_low = (double)gflow / 100.0;
@@ -578,7 +572,7 @@ void set_gf(short gflow, short gfhigh)
buehlmann_config.gf_high = (double)gfhigh / 100.0;
}

void set_vpmb_conservatism(short conservatism)
extern "C" void set_vpmb_conservatism(short conservatism)
{
if (conservatism < 0)
vpmb_config.conservatism = 0;
@@ -588,21 +582,21 @@ void set_vpmb_conservatism(short conservatism)
vpmb_config.conservatism = conservatism;
}

double get_gf(struct deco_state *ds, double ambpressure_bar, const struct dive *dive)
extern "C" double get_gf(struct deco_state *ds, double ambpressure_bar, const struct dive *dive)
{
double surface_pressure_bar = get_surface_pressure_in_mbar(dive, true) / 1000.0;
double gf_low = buehlmann_config.gf_low;
double gf_high = buehlmann_config.gf_high;
double gf;
if (ds->gf_low_pressure_this_dive > surface_pressure_bar)
gf = MAX((double)gf_low, (ambpressure_bar - surface_pressure_bar) /
gf = std::max((double)gf_low, (ambpressure_bar - surface_pressure_bar) /
(ds->gf_low_pressure_this_dive - surface_pressure_bar) * (gf_low - gf_high) + gf_high);
else
gf = gf_low;
return gf;
}

double regressiona(const struct deco_state *ds)
extern "C" double regressiona(const struct deco_state *ds)
{
if (ds->sum1 > 1) {
double avxy = ds->sumxy / ds->sum1;
@@ -615,7 +609,7 @@ double regressiona(const struct deco_state *ds)
return 0.0;
}

double regressionb(const struct deco_state *ds)
extern "C" double regressionb(const struct deco_state *ds)
{
if (ds->sum1)
return ds->sumy / ds->sum1 - ds->sumx * regressiona(ds) / ds->sum1;
@@ -623,14 +617,14 @@ double regressionb(const struct deco_state *ds)
return 0.0;
}

void reset_regression(struct deco_state *ds)
extern "C" void reset_regression(struct deco_state *ds)
{
ds->sum1 = 0;
ds->sumxx = ds->sumx = 0L;
ds->sumy = ds->sumxy = 0.0;
}

void update_regression(struct deco_state *ds, const struct dive *dive)
extern "C" void update_regression(struct deco_state *ds, const struct dive *dive)
{
if (!ds->plot_depth)
return;
core/deco.h (17 changed lines)
@@ -56,8 +56,6 @@ extern void clear_deco(struct deco_state *ds, double surface_pressure, bool in_p
extern void dump_tissues(struct deco_state *ds);
extern void set_gf(short gflow, short gfhigh);
extern void set_vpmb_conservatism(short conservatism);
extern void cache_deco_state(struct deco_state *source, struct deco_state **datap);
extern void restore_deco_state(struct deco_state *data, struct deco_state *target, bool keep_vpmb_state);
extern void nuclear_regeneration(struct deco_state *ds, double time);
extern void vpmb_start_gradient(struct deco_state *ds);
extern void vpmb_next_gradient(struct deco_state *ds, double deco_time, double surface_pressure, bool in_planner);
@@ -74,6 +72,21 @@ extern void update_regression(struct deco_state *ds, const struct dive *dive);

#ifdef __cplusplus
}

// C++ only functions

#include <memory>
struct deco_state_cache {
	// Test if there is cached data
	operator bool () {
		return !!data;
	}
	void cache(const struct deco_state *source);
	void restore(struct deco_state *target, bool keep_vpmb_state) const;
private:
	std::unique_ptr<deco_state> data;
};

#endif

#endif // DECO_H
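The new deco_state_cache wraps the old cache_deco_state()/restore_deco_state() pair in a small C++ holder: the std::unique_ptr replaces the malloc'ed copy and operator bool() tells the caller whether anything has been cached yet. A hedged usage sketch based only on the declarations above (the calling function is illustrative):

#include "core/deco.h"

void plan_step_example(struct deco_state *ds)
{
	deco_state_cache cache;      // empty: operator bool() returns false

	cache.cache(ds);             // deep-copies *ds into the internal unique_ptr
	// ... run a trial calculation that modifies *ds ...
	if (cache)                   // only restore if something was cached
		cache.restore(ds, true); // true: keep the VPM-B bookkeeping fields
}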
@@ -4,7 +4,7 @@
#include "divelist.h"
#include "divelog.h"
#include "subsurface-string.h"
#include "errorhelper.h" // for verbose flag
#include "errorhelper.h"
#include "selection.h"
#include "core/settings/qPrefDiveComputer.h"

@@ -60,9 +60,9 @@ void device::showchanges(const std::string &n) const
{
if (nickName != n) {
if (!n.empty())
qDebug("new nickname %s for DC model %s serial %s", n.c_str(), model.c_str(), serialNumber.c_str());
report_info("new nickname %s for DC model %s serial %s", n.c_str(), model.c_str(), serialNumber.c_str());
else
qDebug("deleted nickname %s for DC model %s serial %s", nickName.c_str(), model.c_str(), serialNumber.c_str());
report_info("deleted nickname %s for DC model %s serial %s", nickName.c_str(), model.c_str(), serialNumber.c_str());
}
}

@ -339,28 +339,27 @@ extern "C" void selective_copy_dive(const struct dive *s, struct dive *d, struct
|
||||
}
|
||||
#undef CONDITIONAL_COPY_STRING
|
||||
|
||||
/* copies all events from all dive computers before a given time
|
||||
/* copies all events from the given dive computer before a given time
|
||||
this is used when editing a dive in the planner to preserve the events
|
||||
of the old dive */
|
||||
extern "C" void copy_events_until(const struct dive *sd, struct dive *dd, int time)
|
||||
extern "C" void copy_events_until(const struct dive *sd, struct dive *dd, int dcNr, int time)
|
||||
{
|
||||
if (!sd || !dd)
|
||||
return;
|
||||
|
||||
const struct divecomputer *s = &sd->dc;
|
||||
struct divecomputer *d = &dd->dc;
|
||||
struct divecomputer *d = get_dive_dc(dd, dcNr);
|
||||
|
||||
while (s && d) {
|
||||
const struct event *ev;
|
||||
ev = s->events;
|
||||
while (ev != NULL) {
|
||||
// Don't add events the planner knows about
|
||||
if (ev->time.seconds < time && !event_is_gaschange(ev) && !event_is_divemodechange(ev))
|
||||
add_event(d, ev->time.seconds, ev->type, ev->flags, ev->value, ev->name);
|
||||
ev = ev->next;
|
||||
}
|
||||
s = s->next;
|
||||
d = d->next;
|
||||
if (!s || !d)
|
||||
return;
|
||||
|
||||
const struct event *ev;
|
||||
ev = s->events;
|
||||
while (ev != NULL) {
|
||||
// Don't add events the planner knows about
|
||||
if (ev->time.seconds < time && !event_is_gaschange(ev) && !event_is_divemodechange(ev))
|
||||
add_event(d, ev->time.seconds, ev->type, ev->flags, ev->value, ev->name);
|
||||
ev = ev->next;
|
||||
}
|
||||
}
|
||||
|
||||
@ -608,7 +607,7 @@ extern "C" int explicit_first_cylinder(const struct dive *dive, const struct div
|
||||
if (ev && ((dc->sample && ev->time.seconds == dc->sample[0].time.seconds) || ev->time.seconds <= 1))
|
||||
res = get_cylinder_index(dive, ev);
|
||||
else if (dc->divemode == CCR)
|
||||
res = MAX(get_cylinder_idx_by_use(dive, DILUENT), res);
|
||||
res = std::max(get_cylinder_idx_by_use(dive, DILUENT), res);
|
||||
}
|
||||
return res < dive->cylinders.nr ? res : 0;
|
||||
}
|
||||
@ -658,7 +657,7 @@ extern "C" void update_setpoint_events(const struct dive *dive, struct divecompu
|
||||
ev->value = new_setpoint;
|
||||
} else {
|
||||
if (!add_event(dc, 0, SAMPLE_EVENT_PO2, 0, new_setpoint, "SP change"))
|
||||
fprintf(stderr, "Could not add setpoint change event\n");
|
||||
report_info("Could not add setpoint change event");
|
||||
}
|
||||
}
|
||||
|
||||
@ -852,7 +851,7 @@ static void fixup_duration(struct dive *dive)
|
||||
duration_t duration = { };
|
||||
|
||||
for_each_relevant_dc (dive, dc) {
|
||||
duration.seconds = MAX(duration.seconds, dc->duration.seconds);
|
||||
duration.seconds = std::max(duration.seconds, dc->duration.seconds);
|
||||
}
|
||||
dive->duration.seconds = duration.seconds;
|
||||
}
|
||||
@ -969,7 +968,7 @@ static void fixup_dc_depths(struct dive *dive, struct divecomputer *dc)
|
||||
}
|
||||
|
||||
update_depth(&dc->maxdepth, maxdepth);
|
||||
if (!has_planned(dive, false) || !is_dc_planner(dc))
|
||||
if (!is_logged(dive) || !is_dc_planner(dc))
|
||||
if (maxdepth > dive->maxdepth.mm)
|
||||
dive->maxdepth.mm = maxdepth;
|
||||
}
|
||||
@ -1311,8 +1310,8 @@ extern "C" struct dive *fixup_dive(struct dive *dive)
|
||||
}
|
||||
|
||||
/* Don't pick a zero for MERGE_MIN() */
|
||||
#define MERGE_MAX(res, a, b, n) res->n = MAX(a->n, b->n)
|
||||
#define MERGE_MIN(res, a, b, n) res->n = (a->n) ? (b->n) ? MIN(a->n, b->n) : (a->n) : (b->n)
|
||||
#define MERGE_MAX(res, a, b, n) res->n = std::max(a->n, b->n)
|
||||
#define MERGE_MIN(res, a, b, n) res->n = (a->n) ? (b->n) ? std::min(a->n, b->n) : (a->n) : (b->n)
|
||||
#define MERGE_TXT(res, a, b, n, sep) res->n = merge_text(a->n, b->n, sep)
|
||||
#define MERGE_NONZERO(res, a, b, n) res->n = a->n ? a->n : b->n
|
||||
|
||||
@ -2310,8 +2309,8 @@ static int likely_same_dive(const struct dive *a, const struct dive *b)
|
||||
int match, fuzz = 20 * 60;
|
||||
|
||||
/* don't merge manually added dives with anything */
|
||||
if (is_manually_added_dc(&a->dc) ||
|
||||
is_manually_added_dc(&b->dc))
|
||||
if (is_dc_manually_added_dive(&a->dc) ||
|
||||
is_dc_manually_added_dive(&b->dc))
|
||||
return 0;
|
||||
|
||||
/*
|
||||
@ -2333,7 +2332,7 @@ static int likely_same_dive(const struct dive *a, const struct dive *b)
|
||||
* Allow a time difference due to dive computer time
|
||||
* setting etc. Check if they overlap.
|
||||
*/
|
||||
fuzz = MAX(a->duration.seconds, b->duration.seconds) / 2;
|
||||
fuzz = std::max(a->duration.seconds, b->duration.seconds) / 2;
|
||||
if (fuzz < 60)
|
||||
fuzz = 60;
|
||||
|
||||
@ -2550,19 +2549,29 @@ static void join_dive_computers(struct dive *d, struct divecomputer *res,
|
||||
remove_redundant_dc(res, prefer_downloaded);
|
||||
}
|
||||
|
||||
// Does this dive have a dive computer for which is_dc_planner has value planned
|
||||
extern "C" bool has_planned(const struct dive *dive, bool planned)
|
||||
static bool has_dc_type(const struct dive *dive, bool dc_is_planner)
|
||||
{
|
||||
const struct divecomputer *dc = &dive->dc;
|
||||
|
||||
while (dc) {
|
||||
if (is_dc_planner(&dive->dc) == planned)
|
||||
if (is_dc_planner(dc) == dc_is_planner)
|
||||
return true;
|
||||
dc = dc->next;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
// Does this dive have a dive computer for which is_dc_planner has value planned
|
||||
extern "C" bool is_planned(const struct dive *dive)
|
||||
{
|
||||
return has_dc_type(dive, true);
|
||||
}
|
||||
|
||||
extern "C" bool is_logged(const struct dive *dive)
|
||||
{
|
||||
return has_dc_type(dive, false);
|
||||
}
|
||||
|
||||
/*
|
||||
* Merging two dives can be subtle, because there's two different ways
|
||||
* of merging:
|
||||
@ -3063,7 +3072,7 @@ extern "C" struct dive *make_first_dc(const struct dive *d, int dc_number)
|
||||
;
|
||||
if (!dc) {
|
||||
free(newdc);
|
||||
fprintf(stderr, "data inconsistent: can't find the current DC");
|
||||
report_info("data inconsistent: can't find the current DC");
|
||||
return res;
|
||||
}
|
||||
dc->next = old_dc->next;
|
||||
@ -3235,11 +3244,11 @@ extern "C" int depth_to_mbar(int depth, const struct dive *dive)
|
||||
|
||||
extern "C" double depth_to_mbarf(int depth, const struct dive *dive)
|
||||
{
|
||||
// To downloaded and planned dives, use DC's values
|
||||
// For downloaded and planned dives, use DC's values
|
||||
int salinity = dive->dc.salinity;
|
||||
pressure_t surface_pressure = dive->dc.surface_pressure;
|
||||
|
||||
if (is_manually_added_dc(&dive->dc)) { // To manual dives, salinity and pressure in another place...
|
||||
if (is_dc_manually_added_dive(&dive->dc)) { // For manual dives, salinity and pressure in another place...
|
||||
surface_pressure = dive->surface_pressure;
|
||||
salinity = dive->user_salinity;
|
||||
}
|
||||
@ -3262,8 +3271,8 @@ extern "C" double depth_to_atm(int depth, const struct dive *dive)
|
||||
* take care of this, but the Uemis we support natively */
|
||||
extern "C" int rel_mbar_to_depth(int mbar, const struct dive *dive)
|
||||
{
|
||||
// To downloaded and planned dives, use DC's salinity. Manual dives, use user's salinity
|
||||
int salinity = is_manually_added_dc(&dive->dc) ? dive->user_salinity : dive->dc.salinity;
|
||||
// For downloaded and planned dives, use DC's salinity. Manual dives, use user's salinity
|
||||
int salinity = is_dc_manually_added_dive(&dive->dc) ? dive->user_salinity : dive->dc.salinity;
|
||||
if (!salinity)
|
||||
salinity = SEAWATER_SALINITY;
|
||||
|
||||
@ -3274,8 +3283,8 @@ extern "C" int rel_mbar_to_depth(int mbar, const struct dive *dive)
|
||||
|
||||
extern "C" int mbar_to_depth(int mbar, const struct dive *dive)
|
||||
{
|
||||
// To downloaded and planned dives, use DC's pressure. Manual dives, use user's pressure
|
||||
pressure_t surface_pressure = is_manually_added_dc(&dive->dc)
|
||||
// For downloaded and planned dives, use DC's pressure. Manual dives, use user's pressure
|
||||
pressure_t surface_pressure = is_dc_manually_added_dive(&dive->dc)
|
||||
? dive->surface_pressure
|
||||
: dive->dc.surface_pressure;
|
||||
|
||||
@ -3386,7 +3395,7 @@ extern "C" struct dive *get_dive_by_uniq_id(int id)
|
||||
}
|
||||
#ifdef DEBUG
|
||||
if (dive == NULL) {
|
||||
fprintf(stderr, "Invalid id %x passed to get_dive_by_diveid, try to fix the code\n", id);
|
||||
report_info("Invalid id %x passed to get_dive_by_diveid, try to fix the code", id);
|
||||
exit(1);
|
||||
}
|
||||
#endif
|
||||
@ -3404,7 +3413,7 @@ extern "C" int get_idx_by_uniq_id(int id)
|
||||
}
|
||||
#ifdef DEBUG
|
||||
if (dive == NULL) {
|
||||
fprintf(stderr, "Invalid id %x passed to get_dive_by_diveid, try to fix the code\n", id);
|
||||
report_info("Invalid id %x passed to get_dive_by_diveid, try to fix the code", id);
|
||||
exit(1);
|
||||
}
|
||||
#endif
|
||||
|
||||
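copy_events_until() now copies events into one explicitly selected dive computer instead of walking all of them in lock-step; the caller passes the index of the computer being edited, and the destination is looked up internally with get_dive_dc(dd, dcNr). A sketch of a call site under that assumption (the variable names here are illustrative):

// before: copy_events_until(old_dive, new_dive, preserved_until);
// after: dc_number selects which divecomputer in new_dive receives the old events
copy_events_until(old_dive, new_dive, dc_number, preserved_until);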
@@ -141,8 +141,7 @@ void split_divecomputer(const struct dive *src, int num, struct dive **out1, str
for (_dc = &_dive->dc; _dc; _dc = _dc->next)

#define for_each_relevant_dc(_dive, _dc) \
bool _all_planned = !has_planned(_dive, false); \
for (_dc = &_dive->dc; _dc; _dc = _dc->next) if (_all_planned || !is_dc_planner(_dc))
for (_dc = &_dive->dc; _dc; _dc = _dc->next) if (!is_logged(_dive) || !is_dc_planner(_dc))

extern struct dive *get_dive_by_uniq_id(int id);
extern int get_idx_by_uniq_id(int id);
@@ -187,7 +186,7 @@ extern int split_dive(const struct dive *dive, struct dive **new1, struct dive *
extern int split_dive_at_time(const struct dive *dive, duration_t time, struct dive **new1, struct dive **new2);
extern struct dive *merge_dives(const struct dive *a, const struct dive *b, int offset, bool prefer_downloaded, struct dive_trip **trip, struct dive_site **site);
extern struct dive *try_to_merge(struct dive *a, struct dive *b, bool prefer_downloaded);
extern void copy_events_until(const struct dive *sd, struct dive *dd, int time);
extern void copy_events_until(const struct dive *sd, struct dive *dd, int dcNr, int time);
extern void copy_used_cylinders(const struct dive *s, struct dive *d, bool used_only);
extern bool is_cylinder_used(const struct dive *dive, int idx);
extern bool is_cylinder_prot(const struct dive *dive, int idx);
@@ -207,7 +206,8 @@ extern void invalidate_dive_cache(struct dive *dc);

extern int total_weight(const struct dive *);

extern bool has_planned(const struct dive *dive, bool planned);
extern bool is_planned(const struct dive *dive);
extern bool is_logged(const struct dive *dive);

/* Get gasmixes at increasing timestamps.
* In "evp", pass a pointer to a "struct event *" which is NULL-initialized on first invocation.

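has_planned(dive, planned) is replaced by two explicit predicates, and for_each_relevant_dc now keys off is_logged(). A short sketch of how call sites translate after this change (consider_dc() is a hypothetical consumer):

// skip planner-generated computers when the dive also carries real, logged data
if (!is_logged(dive) || !is_dc_planner(dc))
	consider_dc(dc);

// old call sites map as:
//   has_planned(dive, true)  ->  is_planned(dive)
//   has_planned(dive, false) ->  is_logged(dive)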
@@ -492,11 +492,6 @@ void add_extra_data(struct divecomputer *dc, const char *key, const char *value)
}
}

bool is_dc_planner(const struct divecomputer *dc)
{
return same_string(dc->model, "planned dive");
}

/*
* Match two dive computer entries against each other, and
* tell if it's the same dive. Return 0 if "don't know",
@@ -548,14 +543,27 @@ void free_dc(struct divecomputer *dc)
free(dc);
}

static const char *manual_dc_name = "manually added dive";
bool is_manually_added_dc(const struct divecomputer *dc)
static const char *planner_dc_name = "planned dive";

bool is_dc_planner(const struct divecomputer *dc)
{
return dc && dc->samples <= 50 &&
same_string(dc->model, manual_dc_name);
return dc && same_string(dc->model, planner_dc_name);
}

void make_manually_added_dc(struct divecomputer *dc)
void make_planner_dc(struct divecomputer *dc)
{
free((void *)dc->model);
dc->model = strdup(planner_dc_name);
}

const char *manual_dc_name = "manually added dive";

bool is_dc_manually_added_dive(const struct divecomputer *dc)
{
return dc && same_string(dc->model, manual_dc_name);
}

void make_manually_added_dive_dc(struct divecomputer *dc)
{
free((void *)dc->model);
dc->model = strdup(manual_dc_name);

@@ -67,10 +67,12 @@ extern void add_event_to_dc(struct divecomputer *dc, struct event *ev);
extern struct event *add_event(struct divecomputer *dc, unsigned int time, int type, int flags, int value, const char *name);
extern void remove_event_from_dc(struct divecomputer *dc, struct event *event);
extern void add_extra_data(struct divecomputer *dc, const char *key, const char *value);
extern bool is_dc_planner(const struct divecomputer *dc);
extern uint32_t calculate_string_hash(const char *str);
extern bool is_manually_added_dc(const struct divecomputer *dc);
extern void make_manually_added_dc(struct divecomputer *dc);
extern bool is_dc_planner(const struct divecomputer *dc);
extern void make_planner_dc(struct divecomputer *dc);
extern const char *manual_dc_name;
extern bool is_dc_manually_added_dive(const struct divecomputer *dc);
extern void make_manually_added_dive_dc(struct divecomputer *dc);

/* Check if two dive computer entries are the exact same dive (-1=no/0=maybe/1=yes) */
extern int match_one_dc(const struct divecomputer *a, const struct divecomputer *b);

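The model-string helpers are split and renamed: is_dc_planner()/make_planner_dc() identify a divecomputer generated by the planner, while is_dc_manually_added_dive()/make_manually_added_dive_dc() identify a manually entered dive (the old sample-count heuristic is dropped). A tiny sketch using only the declarations above; the dive variable is illustrative:

#include <assert.h>

struct divecomputer *dc = &dive->dc;
make_manually_added_dive_dc(dc);        // sets dc->model to "manually added dive"
assert(is_dc_manually_added_dive(dc));
assert(!is_dc_planner(dc));             // planner DCs carry the "planned dive" model string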
@@ -561,7 +561,7 @@ int init_decompression(struct deco_state *ds, const struct dive *dive, bool in_p
}
add_segment(ds, surface_pressure, air, surface_time, 0, OC, prefs.decosac, in_planner);
#if DECO_CALC_DEBUG & 2
printf("Tissues after surface intervall of %d:%02u:\n", FRACTION(surface_time, 60));
printf("Tissues after surface intervall of %d:%02u:\n", FRACTION_TUPLE(surface_time, 60));
dump_tissues(ds);
#endif
}
@@ -598,7 +598,7 @@ int init_decompression(struct deco_state *ds, const struct dive *dive, bool in_p
}
add_segment(ds, surface_pressure, air, surface_time, 0, OC, prefs.decosac, in_planner);
#if DECO_CALC_DEBUG & 2
printf("Tissues after surface intervall of %d:%02u:\n", FRACTION(surface_time, 60));
printf("Tissues after surface intervall of %d:%02u:\n", FRACTION_TUPLE(surface_time, 60));
dump_tissues(ds);
#endif
}
@@ -767,18 +767,6 @@ struct dive *unregister_dive(int idx)
return dive;
}

/* this implements the mechanics of removing the dive from the global
* dive table and the trip, but doesn't deal with updating dive trips, etc */
void delete_single_dive(int idx)
{
struct dive *dive = get_dive(idx);
if (!dive)
return; /* this should never happen */
remove_dive_from_trip(dive, divelog.trips);
unregister_dive_from_dive_site(dive);
delete_dive_from_table(divelog.dives, idx);
}

void process_loaded_dives()
{
sort_dive_table(divelog.dives);
@@ -989,7 +977,7 @@ void add_imported_dives(struct divelog *import_log, int flags)
/* Remove old dives */
for (i = 0; i < dives_to_remove.nr; i++) {
idx = get_divenr(dives_to_remove.dives[i]);
delete_single_dive(idx);
delete_single_dive(&divelog, idx);
}
dives_to_remove.nr = 0;

@@ -1019,6 +1007,10 @@ void add_imported_dives(struct divelog *import_log, int flags)
current_dive = divelog.dives->nr > 0 ? divelog.dives->dives[divelog.dives->nr - 1] : NULL;

free_device_table(devices_to_add);
free(dives_to_add.dives);
free(dives_to_remove.dives);
free(trips_to_add.trips);
free(dive_sites_to_add.dive_sites);

/* Inform frontend of reset data. This should reset all the models. */
emit_reset_signal();

@@ -62,7 +62,6 @@ void clear_dive_file_data();
void clear_dive_table(struct dive_table *table);
void move_dive_table(struct dive_table *src, struct dive_table *dst);
struct dive *unregister_dive(int idx);
extern void delete_single_dive(int idx);
extern bool has_dive(unsigned int deviceid, unsigned int diveid);

#ifdef __cplusplus

@@ -3,6 +3,7 @@
#include "divelist.h"
#include "divesite.h"
#include "device.h"
#include "errorhelper.h"
#include "filterpreset.h"
#include "trip.h"

@@ -63,14 +64,28 @@ struct divelog &divelog::operator=(divelog &&log)
return *this;
}

/* this implements the mechanics of removing the dive from the
* dive log and the trip, but doesn't deal with updating dive trips, etc */
void delete_single_dive(struct divelog *log, int idx)
{
if (idx < 0 || idx > log->dives->nr) {
report_info("Warning: deleting unexisting dive with index %d", idx);
return;
}
struct dive *dive = log->dives->dives[idx];
remove_dive_from_trip(dive, log->trips);
unregister_dive_from_dive_site(dive);
delete_dive_from_table(log->dives, idx);
}

void divelog::clear()
{
while (dives->nr)
delete_single_dive(0);
while (dives->nr > 0)
delete_single_dive(this, dives->nr - 1);
while (sites->nr)
delete_dive_site(get_dive_site(0, sites), sites);
if (trips->nr != 0) {
fprintf(stderr, "Warning: trip table not empty in divelog::clear()!\n");
report_info("Warning: trip table not empty in divelog::clear()!");
trips->nr = 0;
}
clear_device_table(devices);

@@ -34,6 +34,7 @@ extern "C" {
#endif

void clear_divelog(struct divelog *);
extern void delete_single_dive(struct divelog *, int idx);

#ifdef __cplusplus
}

|
||||
#include "dive.h"
|
||||
#include "divelist.h"
|
||||
#include "divelog.h"
|
||||
#include "errorhelper.h"
|
||||
#include "membuffer.h"
|
||||
#include "subsurface-string.h"
|
||||
#include "table.h"
|
||||
@ -371,17 +372,17 @@ void add_dive_to_dive_site(struct dive *d, struct dive_site *ds)
|
||||
{
|
||||
int idx;
|
||||
if (!d) {
|
||||
fprintf(stderr, "Warning: add_dive_to_dive_site called with NULL dive\n");
|
||||
report_info("Warning: add_dive_to_dive_site called with NULL dive");
|
||||
return;
|
||||
}
|
||||
if (!ds) {
|
||||
fprintf(stderr, "Warning: add_dive_to_dive_site called with NULL dive site\n");
|
||||
report_info("Warning: add_dive_to_dive_site called with NULL dive site");
|
||||
return;
|
||||
}
|
||||
if (d->dive_site == ds)
|
||||
return;
|
||||
if (d->dive_site) {
|
||||
fprintf(stderr, "Warning: adding dive that already belongs to a dive site to a different site\n");
|
||||
report_info("Warning: adding dive that already belongs to a dive site to a different site");
|
||||
unregister_dive_from_dive_site(d);
|
||||
}
|
||||
idx = dive_table_get_insertion_index(&ds->dives, d);
|
||||
|
||||
@ -9,7 +9,6 @@
|
||||
#include "errorhelper.h"
|
||||
#include "subsurface-string.h"
|
||||
#include "qthelper.h"
|
||||
#include <QDebug>
|
||||
#include <QJsonDocument>
|
||||
#include <QJsonArray>
|
||||
#include <QJsonObject>
|
||||
@ -124,7 +123,6 @@ taxonomy_data reverseGeoLookup(degrees_t latitude, degrees_t longitude)
|
||||
}
|
||||
} else {
|
||||
report_error("geonames.org did not provide reverse lookup information");
|
||||
//qDebug() << "no reverse geo lookup; geonames returned\n" << fullReply;
|
||||
}
|
||||
|
||||
return taxonomy;
|
||||
|
||||
@@ -1,10 +1,10 @@
#include "downloadfromdcthread.h"
#include "core/errorhelper.h"
#include "core/libdivecomputer.h"
#include "core/qthelper.h"
#include "core/range.h"
#include "core/settings/qPrefDiveComputer.h"
#include "core/divelist.h"
#include <QDebug>
#if defined(Q_OS_ANDROID)
#include "core/subsurface-string.h"
#endif
@@ -91,7 +91,7 @@ void DownloadThread::run()
internalData->log = &log;
internalData->btname = strdup(m_data->devBluetoothName().toUtf8());
if (!internalData->descriptor) {
qDebug() << "No download possible when DC type is unknown";
report_info("No download possible when DC type is unknown");
return;
}
// get the list of transports that this device supports and filter depending on Bluetooth option
@@ -103,8 +103,8 @@ void DownloadThread::run()
if (transports == DC_TRANSPORT_USBHID)
internalData->devname = "";

qDebug() << "Starting download from " << getTransportString(transports);
qDebug() << "downloading" << (internalData->force_download ? "all" : "only new") << "dives";
report_info("Starting download from %s", qPrintable(getTransportString(transports)));
report_info("downloading %s dives", internalData->force_download ? "all" : "only new");
clear_divelog(&log);

Q_ASSERT(internalData->log != nullptr);
@@ -117,11 +117,11 @@ void DownloadThread::run()
errorText = do_libdivecomputer_import(internalData);
if (errorText) {
error = str_error(errorText, internalData->devname, internalData->vendor, internalData->product);
qDebug() << "Finishing download thread:" << error;
report_info("Finishing download thread: %s", qPrintable(error));
} else {
if (!log.dives->nr)
error = tr("No new dives downloaded from dive computer");
qDebug() << "Finishing download thread:" << log.dives->nr << "dives downloaded";
report_info("Finishing download thread: %d dives downloaded", log.dives->nr);
}
qPrefDiveComputer::set_vendor(internalData->vendor);
qPrefDiveComputer::set_product(internalData->product);
@@ -193,7 +193,7 @@ void fill_computer_list()
void show_computer_list()
{
unsigned int transportMask = get_supported_transports(NULL);
qDebug() << "Supported dive computers:";
report_info("Supported dive computers:");
for (const QString &vendor: vendorList) {
QString msg = vendor + ": ";
for (const QString &product: productList[vendor]) {
@@ -203,7 +203,7 @@ void show_computer_list()
msg += product + " (" + transportString + "), ";
}
msg.chop(2);
qDebug() << msg;
report_info("%s", qPrintable(msg));
}
}

@@ -274,7 +274,7 @@ QString DCDeviceData::devBluetoothName() const

QString DCDeviceData::descriptor() const
{
return "";
return QString();
}

bool DCDeviceData::bluetoothMode() const

@@ -16,6 +16,7 @@
#include "dive.h"
#include "divelist.h"
#include "divelog.h"
#include "errorhelper.h"
#include "pref.h"
#include "subsurface-string.h"
#include "table.h"
@@ -108,7 +109,7 @@ void add_tank_info_imperial(struct tank_info_table *table, const char *name, int
add_to_tank_info_table(table, table->nr, info);
}

extern struct tank_info *get_tank_info(struct tank_info_table *table, const char *name)
static struct tank_info *get_tank_info(struct tank_info_table *table, const char *name)
{
for (int i = 0; i < table->nr; ++i) {
if (same_string(table->infos[i].name, name))
@@ -117,34 +118,41 @@ extern struct tank_info *get_tank_info(struct tank_info_table *table, const char
return NULL;
}

extern void set_tank_info_size(struct tank_info_table *table, const char *name, volume_t size)
extern void set_tank_info_data(struct tank_info_table *table, const char *name, volume_t size, pressure_t working_pressure)
{
struct tank_info *info = get_tank_info(table, name);
if (info) {
// Try to be smart about metric vs. imperial
if (info->cuft == 0 && info->psi == 0)
if (info->ml != 0 || info->bar != 0) {
info->bar = working_pressure.mbar / 1000;
info->ml = size.mliter;
else
info->cuft = lrint(ml_to_cuft(size.mliter));
} else {
info->psi = lrint(to_PSI(working_pressure));
info->cuft = lrint(ml_to_cuft(size.mliter) * mbar_to_atm(working_pressure.mbar));
}
} else {
// By default add metric...?
add_tank_info_metric(table, name, size.mliter, 0);
// Metric is a better choice as the volume is independent of the working pressure
add_tank_info_metric(table, name, size.mliter, working_pressure.mbar / 1000);
}
}

extern void set_tank_info_workingpressure(struct tank_info_table *table, const char *name, pressure_t working_pressure)
extern void extract_tank_info(const struct tank_info *info, volume_t *size, pressure_t *working_pressure)
{
working_pressure->mbar = info->bar != 0 ? info->bar * 1000 : psi_to_mbar(info->psi);
if (info->ml != 0)
size->mliter = info->ml;
else if (working_pressure->mbar != 0)
size->mliter = lrint(cuft_to_l(info->cuft) * 1000 / mbar_to_atm(working_pressure->mbar));
}

extern bool get_tank_info_data(struct tank_info_table *table, const char *name, volume_t *size, pressure_t *working_pressure)
{
struct tank_info *info = get_tank_info(table, name);
if (info) {
// Try to be smart about metric vs. imperial
if (info->cuft == 0 && info->psi == 0)
info->bar = working_pressure.mbar / 1000;
else
info->psi = lrint(mbar_to_PSI(working_pressure.mbar));
} else {
// By default add metric...?
add_tank_info_metric(table, name, 0, working_pressure.mbar / 1000);
extract_tank_info(info, size, working_pressure);

return true;
}
return false;
}

/* placeholders for a few functions that we need to redesign for the Qt UI */
@@ -206,13 +214,6 @@ void add_cloned_weightsystem(struct weightsystem_table *t, weightsystem_t ws)
add_to_weightsystem_table(t, t->nr, clone_weightsystem(ws));
}

/* Add a clone of a weightsystem to the end of a weightsystem table.
* Cloned means that the description-string is copied. */
void add_cloned_weightsystem_at(struct weightsystem_table *t, weightsystem_t ws)
{
add_to_weightsystem_table(t, t->nr, clone_weightsystem(ws));
}

cylinder_t clone_cylinder(cylinder_t cyl)
{
cylinder_t res = cyl;
@@ -460,7 +461,7 @@ cylinder_t *get_cylinder(const struct dive *d, int idx)
* in the table to mark no-cylinder surface interavals. This is horrendous. Fix ASAP. */
// if (idx < 0 || idx >= d->cylinders.nr) {
if (idx < 0 || idx >= d->cylinders.nr + 1 || idx >= d->cylinders.allocated) {
fprintf(stderr, "Warning: accessing invalid cylinder %d (%d existing)\n", idx, d->cylinders.nr);
report_info("Warning: accessing invalid cylinder %d (%d existing)", idx, d->cylinders.nr);
return NULL;
}
return &d->cylinders.cylinders[idx];
@@ -469,7 +470,7 @@ cylinder_t *get_cylinder(const struct dive *d, int idx)
cylinder_t *get_or_create_cylinder(struct dive *d, int idx)
{
if (idx < 0) {
fprintf(stderr, "Warning: accessing invalid cylinder %d\n", idx);
report_info("Warning: accessing invalid cylinder %d", idx);
return NULL;
}
while (idx >= d->cylinders.nr)
@@ -509,12 +510,38 @@ cylinder_t create_new_cylinder(const struct dive *d)
cylinder_t cyl = empty_cylinder;
fill_default_cylinder(d, &cyl);
cyl.start = cyl.type.workingpressure;
cyl.manually_added = true;
cyl.cylinder_use = OC_GAS;
return cyl;
}

static bool show_cylinder(const struct dive *d, int i)
cylinder_t create_new_manual_cylinder(const struct dive *d)
{
cylinder_t cyl = create_new_cylinder(d);
cyl.manually_added = true;
return cyl;
}

void add_default_cylinder(struct dive *d)
{
// Only add if there are no cylinders yet
if (d->cylinders.nr > 0)
return;

cylinder_t cyl;
if (!empty_string(prefs.default_cylinder)) {
cyl = create_new_cylinder(d);
} else {
cyl = empty_cylinder;
// roughly an AL80
cyl.type.description = strdup(translate("gettextFromC", "unknown"));
cyl.type.size.mliter = 11100;
cyl.type.workingpressure.mbar = 207000;
}
add_cylinder(&d->cylinders, 0, cyl);
reset_cylinders(d, false);
}

static bool show_cylinder(const struct dive *d, int i)
{
if (is_cylinder_used(d, i))
return true;

@@ -93,7 +93,8 @@ extern void reset_cylinders(struct dive *dive, bool track_gas);
extern int gas_volume(const cylinder_t *cyl, pressure_t p); /* Volume in mliter of a cylinder at pressure 'p' */
extern int find_best_gasmix_match(struct gasmix mix, const struct cylinder_table *cylinders);
extern void fill_default_cylinder(const struct dive *dive, cylinder_t *cyl); /* dive is needed to fill out MOD, which depends on salinity. */
extern cylinder_t create_new_cylinder(const struct dive *dive); /* dive is needed to fill out MOD, which depends on salinity. */
extern cylinder_t create_new_manual_cylinder(const struct dive *dive); /* dive is needed to fill out MOD, which depends on salinity. */
extern void add_default_cylinder(struct dive *dive);
extern int first_hidden_cylinder(const struct dive *d);
#ifdef DEBUG_CYL
extern void dump_cylinders(struct dive *dive, bool verbose);
@@ -125,9 +126,9 @@ extern void reset_tank_info_table(struct tank_info_table *table);
extern void clear_tank_info_table(struct tank_info_table *table);
extern void add_tank_info_metric(struct tank_info_table *table, const char *name, int ml, int bar);
extern void add_tank_info_imperial(struct tank_info_table *table, const char *name, int cuft, int psi);
extern void set_tank_info_size(struct tank_info_table *table, const char *name, volume_t size);
extern void set_tank_info_workingpressure(struct tank_info_table *table, const char *name, pressure_t working_pressure);
extern struct tank_info *get_tank_info(struct tank_info_table *table, const char *name);
extern void extract_tank_info(const struct tank_info *info, volume_t *size, pressure_t *working_pressure);
extern bool get_tank_info_data(struct tank_info_table *table, const char *name, volume_t *size, pressure_t *pressure);
extern void set_tank_info_data(struct tank_info_table *table, const char *name, volume_t size, pressure_t working_pressure);

struct ws_info_t {
const char *name;
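The two setters set_tank_info_size()/set_tank_info_workingpressure() collapse into a single set_tank_info_data(), and reads go through get_tank_info_data()/extract_tank_info(), which normalize between metric (ml/bar) and imperial (cuft/psi) entries. A round-trip sketch using only the declarations above; the table pointer and tank name are illustrative:

volume_t size;
size.mliter = 11100;        // roughly an AL80
pressure_t wp;
wp.mbar = 207000;

set_tank_info_data(table, "AL80", size, wp);   // adds a metric entry if "AL80" is unknown

volume_t size_out;
pressure_t wp_out;
if (get_tank_info_data(table, "AL80", &size_out, &wp_out))
	report_info("AL80: %d ml at %d mbar", size_out.mliter, wp_out.mbar);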
@@ -6,7 +6,6 @@
#include <stdarg.h>
#include "errorhelper.h"
#include "membuffer.h"
#include "qthelper.h"

#if !defined(Q_OS_ANDROID) && !defined(__ANDROID__)
#define LOG_MSG(fmt, ...) fprintf(stderr, fmt, ##__VA_ARGS__)
@@ -21,7 +20,7 @@ int verbose;

void report_info(const char *fmt, ...)
{
struct membuffer buf = { 0 };
struct membufferpp buf;

VA_BUF(&buf, fmt);
strip_mb(&buf);
@@ -32,7 +31,7 @@ static void (*error_cb)(char *) = NULL;

int report_error(const char *fmt, ...)
{
struct membuffer buf = { 0 };
struct membufferpp buf;

VA_BUF(&buf, fmt);
strip_mb(&buf);
@@ -13,6 +13,7 @@
#include "dive.h"
#include "divelog.h"
#include "subsurface-string.h"
#include "format.h"
#include "errorhelper.h"
#include "file.h"
#include "git-access.h"
@@ -124,7 +125,7 @@ static int try_to_open_db(const char *filename, std::string &mem, struct divelog
retval = sqlite3_open(filename, &handle);

if (retval) {
fprintf(stderr, "Database connection failed '%s'.\n", filename);
report_info("Database connection failed '%s'", filename);
return 1;
}

@@ -250,14 +251,14 @@ static int parse_file_buffer(const char *filename, std::string &mem, struct dive
return parse_xml_buffer(filename, mem.data(), mem.size(), log, NULL);
}

extern "C" bool remote_repo_uptodate(const char *filename, struct git_info *info)
bool remote_repo_uptodate(const char *filename, struct git_info *info)
{
std::string current_sha = saved_git_id;

if (is_git_repository(filename, info) && open_git_repository(info)) {
std::string sha = get_sha(info->repo, info->branch);
if (!sha.empty() && current_sha == sha) {
fprintf(stderr, "already have loaded SHA %s - don't load again\n", sha.c_str());
report_info("already have loaded SHA %s - don't load again", sha.c_str());
return true;
}
}
@@ -278,14 +279,11 @@ extern "C" int parse_file(const char *filename, struct divelog *log)
* Opening the cloud storage repository failed for some reason
* give up here and don't send errors about git repositories
*/
if (info.is_subsurface_cloud) {
cleanup_git_info(&info);
if (info.is_subsurface_cloud)
return -1;
}
}

int ret = git_load_dives(&info, log);
cleanup_git_info(&info);
return ret;
}

@@ -316,7 +314,7 @@ extern "C" int parse_file(const char *filename, struct divelog *log)
std::string wl_name = std::string(filename, t - filename) + ".add";
auto [wl_mem, err] = readfile(wl_name.c_str());
if (err < 0) {
fprintf(stderr, "No file %s found. No WLog extensions.\n", wl_name.c_str());
report_info("No file %s found. No WLog extensions.", wl_name.c_str());
wl_mem.clear();
}
return datatrak_import(mem, wl_mem, log);

@ -644,7 +644,7 @@ std::string filter_constraint_data_to_string(const filter_constraint *c)
|
||||
void filter_constraint_set_stringlist(filter_constraint &c, const QString &s)
|
||||
{
|
||||
if (!filter_constraint_is_string(c.type)) {
|
||||
fprintf(stderr, "Setting strings in non-string constraint!\n");
|
||||
report_info("Setting strings in non-string constraint!");
|
||||
return;
|
||||
}
|
||||
c.data.string_list->clear();
|
||||
@ -655,7 +655,7 @@ void filter_constraint_set_stringlist(filter_constraint &c, const QString &s)
|
||||
void filter_constraint_set_timestamp_from(filter_constraint &c, timestamp_t from)
|
||||
{
|
||||
if (!filter_constraint_is_timestamp(c.type)) {
|
||||
fprintf(stderr, "Setting timestamp from in non-timestamp constraint!\n");
|
||||
report_info("Setting timestamp from in non-timestamp constraint!");
|
||||
return;
|
||||
}
|
||||
c.data.timestamp_range.from = from;
|
||||
@ -664,7 +664,7 @@ void filter_constraint_set_timestamp_from(filter_constraint &c, timestamp_t from
|
||||
void filter_constraint_set_timestamp_to(filter_constraint &c, timestamp_t to)
|
||||
{
|
||||
if (!filter_constraint_is_timestamp(c.type)) {
|
||||
fprintf(stderr, "Setting timestamp to in non-timestamp constraint!\n");
|
||||
report_info("Setting timestamp to in non-timestamp constraint!");
|
||||
return;
|
||||
}
|
||||
c.data.timestamp_range.to = to;
|
||||
@ -673,7 +673,7 @@ void filter_constraint_set_timestamp_to(filter_constraint &c, timestamp_t to)
|
||||
void filter_constraint_set_integer_from(filter_constraint &c, int from)
|
||||
{
|
||||
if (!is_numerical_constraint(c.type)) {
|
||||
fprintf(stderr, "Setting integer from of non-numerical constraint!\n");
|
||||
report_info("Setting integer from of non-numerical constraint!");
|
||||
return;
|
||||
}
|
||||
c.data.numerical_range.from = from;
|
||||
@ -682,7 +682,7 @@ void filter_constraint_set_integer_from(filter_constraint &c, int from)
|
||||
void filter_constraint_set_integer_to(filter_constraint &c, int to)
|
||||
{
|
||||
if (!is_numerical_constraint(c.type)) {
|
||||
fprintf(stderr, "Setting integer to of non-numerical constraint!\n");
|
||||
report_info("Setting integer to of non-numerical constraint!");
|
||||
return;
|
||||
}
|
||||
c.data.numerical_range.to = to;
|
||||
@ -691,7 +691,7 @@ void filter_constraint_set_integer_to(filter_constraint &c, int to)
|
||||
void filter_constraint_set_float_from(filter_constraint &c, double from)
|
||||
{
|
||||
if (!is_numerical_constraint(c.type)) {
|
||||
fprintf(stderr, "Setting float from of non-numerical constraint!\n");
|
||||
report_info("Setting float from of non-numerical constraint!");
|
||||
return;
|
||||
}
|
||||
c.data.numerical_range.from = display_to_base_unit(from, c.type);
|
||||
@ -700,7 +700,7 @@ void filter_constraint_set_float_from(filter_constraint &c, double from)
|
||||
void filter_constraint_set_float_to(filter_constraint &c, double to)
|
||||
{
|
||||
if (!is_numerical_constraint(c.type)) {
|
||||
fprintf(stderr, "Setting float to of non-numerical constraint!\n");
|
||||
report_info("Setting float to of non-numerical constraint!");
|
||||
return;
|
||||
}
|
||||
c.data.numerical_range.to = display_to_base_unit(to, c.type);
|
||||
@ -709,7 +709,7 @@ void filter_constraint_set_float_to(filter_constraint &c, double to)
|
||||
void filter_constraint_set_multiple_choice(filter_constraint &c, uint64_t multiple_choice)
|
||||
{
|
||||
if (!filter_constraint_is_multiple_choice(c.type)) {
|
||||
fprintf(stderr, "Setting multiple-choice to of non-multiple-choice constraint!\n");
|
||||
report_info("Setting multiple-choice to of non-multiple-choice constraint!");
|
||||
return;
|
||||
}
|
||||
c.data.multiple_choice = multiple_choice;
|
||||
@ -718,7 +718,7 @@ void filter_constraint_set_multiple_choice(filter_constraint &c, uint64_t multip
|
||||
QString filter_constraint_get_string(const filter_constraint &c)
|
||||
{
|
||||
if (!filter_constraint_is_string(c.type)) {
|
||||
fprintf(stderr, "Getting string of non-string constraint!\n");
|
||||
report_info("Getting string of non-string constraint!");
|
||||
return QString();
|
||||
}
|
||||
return c.data.string_list->join(",");
|
||||
@ -727,7 +727,7 @@ QString filter_constraint_get_string(const filter_constraint &c)
|
||||
int filter_constraint_get_integer_from(const filter_constraint &c)
|
||||
{
|
||||
if (!is_numerical_constraint(c.type)) {
|
||||
fprintf(stderr, "Getting integer from of non-numerical constraint!\n");
|
||||
report_info("Getting integer from of non-numerical constraint!");
|
||||
return -1;
|
||||
}
|
||||
return c.data.numerical_range.from;
|
||||
@ -736,7 +736,7 @@ int filter_constraint_get_integer_from(const filter_constraint &c)
|
||||
int filter_constraint_get_integer_to(const filter_constraint &c)
|
||||
{
|
||||
if (!is_numerical_constraint(c.type)) {
|
||||
fprintf(stderr, "Getting integer to of non-numerical constraint!\n");
|
||||
report_info("Getting integer to of non-numerical constraint!");
|
||||
return -1;
|
||||
}
|
||||
return c.data.numerical_range.to;
|
||||
@ -745,7 +745,7 @@ int filter_constraint_get_integer_to(const filter_constraint &c)
|
||||
double filter_constraint_get_float_from(const filter_constraint &c)
|
||||
{
|
||||
if (!is_numerical_constraint(c.type)) {
|
||||
fprintf(stderr, "Getting float from of non-numerical constraint!\n");
|
||||
report_info("Getting float from of non-numerical constraint!");
|
||||
return 0.0;
|
||||
}
|
||||
return base_to_display_unit(c.data.numerical_range.from, c.type);
|
||||
@ -754,7 +754,7 @@ double filter_constraint_get_float_from(const filter_constraint &c)
|
||||
double filter_constraint_get_float_to(const filter_constraint &c)
|
||||
{
|
||||
if (!is_numerical_constraint(c.type)) {
|
||||
fprintf(stderr, "Getting float to of non-numerical constraint!\n");
|
||||
report_info("Getting float to of non-numerical constraint!");
|
||||
return 0.0;
|
||||
}
|
||||
return base_to_display_unit(c.data.numerical_range.to, c.type);
|
||||
@ -763,7 +763,7 @@ double filter_constraint_get_float_to(const filter_constraint &c)
|
||||
timestamp_t filter_constraint_get_timestamp_from(const filter_constraint &c)
|
||||
{
|
||||
if (!filter_constraint_is_timestamp(c.type)) {
|
||||
fprintf(stderr, "Getting timestamp from of non-timestamp constraint!\n");
|
||||
report_info("Getting timestamp from of non-timestamp constraint!");
|
||||
return 0;
|
||||
}
|
||||
return c.data.timestamp_range.from;
|
||||
@ -772,7 +772,7 @@ timestamp_t filter_constraint_get_timestamp_from(const filter_constraint &c)
|
||||
timestamp_t filter_constraint_get_timestamp_to(const filter_constraint &c)
|
||||
{
|
||||
if (!filter_constraint_is_timestamp(c.type)) {
|
||||
fprintf(stderr, "Getting timestamp to of non-timestamp constraint!\n");
|
||||
report_info("Getting timestamp to of non-timestamp constraint!");
|
||||
return 0;
|
||||
}
|
||||
return c.data.timestamp_range.to;
|
||||
@ -781,7 +781,7 @@ timestamp_t filter_constraint_get_timestamp_to(const filter_constraint &c)
|
||||
uint64_t filter_constraint_get_multiple_choice(const filter_constraint &c)
|
||||
{
|
||||
if (!filter_constraint_is_multiple_choice(c.type)) {
|
||||
fprintf(stderr, "Getting multiple-choice of non-multiple choice constraint!\n");
|
||||
report_info("Getting multiple-choice of non-multiple choice constraint!");
|
||||
return 0;
|
||||
}
|
||||
return c.data.multiple_choice;
|
||||
@ -819,7 +819,7 @@ static bool has_tags(const filter_constraint &c, const struct dive *d)
|
||||
{
|
||||
QStringList dive_tags;
|
||||
for (const tag_entry *tag = d->tag_list; tag; tag = tag->next)
|
||||
dive_tags.push_back(QString(tag->tag->name).trimmed());
|
||||
dive_tags.push_back(QString::fromStdString(tag->tag->name).trimmed());
|
||||
dive_tags.append(gettextFromC::tr(divemode_text_ui[d->dc.divemode]).trimmed());
|
||||
return check(c, dive_tags);
|
||||
}
|
||||
@ -1074,9 +1074,9 @@ bool filter_constraint_match_dive(const filter_constraint &c, const struct dive
|
||||
case FILTER_CONSTRAINT_SAC:
|
||||
return check_numerical_range_non_zero(c, d->sac);
|
||||
case FILTER_CONSTRAINT_LOGGED:
|
||||
return has_planned(d, false) != c.negate;
|
||||
return is_logged(d) != c.negate;
|
||||
case FILTER_CONSTRAINT_PLANNED:
|
||||
return has_planned(d, true) != c.negate;
|
||||
return is_planned(d) != c.negate;
|
||||
case FILTER_CONSTRAINT_DIVE_MODE:
|
||||
return check_multiple_choice(c, (int)d->dc.divemode); // should we be smarter and check all DCs?
|
||||
case FILTER_CONSTRAINT_TAGS:
|
||||
|
||||
@ -343,67 +343,30 @@ QString vqasprintf_loc(const char *fmt, va_list ap_in)
|
||||
return ret;
|
||||
}
|
||||
|
||||
// Put a formatted string respecting the default locale into a C-style array in UTF-8 encoding.
|
||||
// The only complication arises from the fact that we don't want to cut through multi-byte UTF-8 code points.
|
||||
extern "C" int snprintf_loc(char *dst, size_t size, const char *cformat, ...)
|
||||
// TODO: Avoid back-and-forth conversion between UTF16 and UTF8.
|
||||
std::string casprintf_loc(const char *cformat, ...)
|
||||
{
|
||||
va_list ap;
|
||||
va_start(ap, cformat);
|
||||
int res = vsnprintf_loc(dst, size, cformat, ap);
|
||||
va_end(ap);
|
||||
return res;
|
||||
}
|
||||
|
||||
extern "C" int vsnprintf_loc(char *dst, size_t size, const char *cformat, va_list ap)
|
||||
{
|
||||
QByteArray utf8 = vqasprintf_loc(cformat, ap).toUtf8();
|
||||
const char *data = utf8.constData();
|
||||
size_t utf8_size = utf8.size();
|
||||
if (size == 0)
|
||||
return utf8_size;
|
||||
if (size < utf8_size + 1) {
|
||||
memcpy(dst, data, size - 1);
|
||||
if ((data[size - 1] & 0xC0) == 0x80) {
|
||||
// We truncated a multi-byte UTF-8 encoding.
|
||||
--size;
|
||||
// Jump to last copied byte.
|
||||
if (size > 0)
|
||||
--size;
|
||||
while(size > 0 && (dst[size] & 0xC0) == 0x80)
|
||||
--size;
|
||||
dst[size] = 0;
|
||||
} else {
|
||||
dst[size - 1] = 0;
|
||||
}
|
||||
} else {
|
||||
memcpy(dst, data, utf8_size + 1); // QByteArray guarantees a trailing 0
|
||||
}
|
||||
return utf8_size;
|
||||
va_end(ap);
|
||||
return std::string(utf8.constData(), utf8.size());
|
||||
}
|
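The removed vsnprintf_loc() body shows the rule for truncating UTF-8 safely, as the comment above puts it: never cut through a multi-byte code point. Continuation bytes always match 10xxxxxx, i.e. (byte & 0xC0) == 0x80, so if the cut lands on one, you back up to the previous lead byte and terminate there. A self-contained sketch of that rule (illustrative, not Subsurface code):

```cpp
// Copy a UTF-8 string into a fixed-size buffer without splitting a code point.
// Returns the full source length, snprintf-style.
#include <cstring>
#include <string>

static size_t truncate_utf8_copy(char *dst, size_t size, const std::string &src)
{
	if (size == 0)
		return src.size();			// nothing to write, just report the needed size
	if (src.size() < size) {			// fits, including the trailing NUL
		memcpy(dst, src.c_str(), src.size() + 1);
		return src.size();
	}
	size_t len = size - 1;				// bytes actually copied
	memcpy(dst, src.data(), len);
	// If the first byte we did NOT copy is a continuation byte (10xxxxxx),
	// the cut went through a multi-byte code point: drop the partial one.
	if ((static_cast<unsigned char>(src[len]) & 0xC0) == 0x80) {
		while (len > 0 && (static_cast<unsigned char>(dst[len - 1]) & 0xC0) == 0x80)
			--len;				// back up over continuation bytes
		if (len > 0)
			--len;				// and remove the lead byte as well
	}
	dst[len] = '\0';
	return src.size();
}
```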
||||
|
||||
int asprintf_loc(char **dst, const char *cformat, ...)
|
||||
std::string __printf(1, 2) format_string_std(const char *fmt, ...)
|
||||
{
|
||||
va_list ap;
|
||||
va_start(ap, cformat);
|
||||
int res = vasprintf_loc(dst, cformat, ap);
|
||||
va_start(ap, fmt);
|
||||
size_t stringsize = vsnprintf(NULL, 0, fmt, ap);
|
||||
va_end(ap);
|
||||
if (stringsize == 0)
|
||||
return std::string();
|
||||
std::string res;
|
||||
res.resize(stringsize); // Pointless clearing, oh my.
|
||||
// This overwrites the terminal null-byte of std::string.
|
||||
// That's probably "undefined behavior". Oh my.
|
||||
va_start(ap, fmt);
|
||||
vsnprintf(res.data(), stringsize + 1, fmt, ap);
|
||||
va_end(ap);
|
||||
return res;
|
||||
}
|
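format_string_std() above uses the classic two-pass vsnprintf() sizing trick, and its inline comments fret about writing the terminator into the std::string's own storage (in C++17 this is allowed as long as only '\0' is written at index size()). Here is a hedged sketch of the same idiom that sidesteps the question with a scratch buffer; note the va_copy(), since a va_list cannot be reused after vsnprintf() — the diffed code achieves the same by calling va_end() and va_start() again inside the variadic function.

```cpp
// Two-pass formatting into std::string: first pass measures, second pass formats.
#include <cstdarg>
#include <cstdio>
#include <string>
#include <vector>

static std::string vformat_std(const char *fmt, va_list ap)
{
	va_list ap2;
	va_copy(ap2, ap);
	int needed = vsnprintf(nullptr, 0, fmt, ap2);	// measuring pass
	va_end(ap2);
	if (needed <= 0)
		return std::string();
	std::vector<char> buf(needed + 1);		// +1 for the trailing NUL
	vsnprintf(buf.data(), buf.size(), fmt, ap);	// formatting pass
	return std::string(buf.data(), needed);
}

static std::string format_std(const char *fmt, ...)
{
	va_list ap;
	va_start(ap, fmt);
	std::string res = vformat_std(fmt, ap);
	va_end(ap);
	return res;
}
```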
||||
|
||||
int vasprintf_loc(char **dst, const char *cformat, va_list ap)
|
||||
{
|
||||
QByteArray utf8 = vqasprintf_loc(cformat, ap).toUtf8();
|
||||
*dst = strdup(utf8.constData());
|
||||
return utf8.size();
|
||||
}
|
||||
|
||||
extern "C" void put_vformat_loc(struct membuffer *b, const char *fmt, va_list args)
|
||||
{
|
||||
QByteArray utf8 = vqasprintf_loc(fmt, args).toUtf8();
|
||||
const char *data = utf8.constData();
|
||||
size_t utf8_size = utf8.size();
|
||||
|
||||
make_room(b, utf8_size);
|
||||
memcpy(b->buffer + b->len, data, utf8_size);
|
||||
b->len += utf8_size;
|
||||
}
|
||||
|
||||
@ -11,19 +11,8 @@
|
||||
#include <QString>
|
||||
__printf(1, 2) QString qasprintf_loc(const char *cformat, ...);
|
||||
__printf(1, 0) QString vqasprintf_loc(const char *cformat, va_list ap);
|
||||
#endif
|
||||
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
|
||||
__printf(3, 4) int snprintf_loc(char *dst, size_t size, const char *cformat, ...);
|
||||
__printf(3, 0) int vsnprintf_loc(char *dst, size_t size, const char *cformat, va_list ap);
|
||||
__printf(2, 3) int asprintf_loc(char **dst, const char *cformat, ...);
|
||||
__printf(2, 0) int vasprintf_loc(char **dst, const char *cformat, va_list ap);
|
||||
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
__printf(1, 2) std::string casprintf_loc(const char *cformat, ...);
|
||||
__printf(1, 2) std::string format_string_std(const char *fmt, ...);
|
||||
#endif
|
||||
|
||||
#endif
|
||||
|
||||
@ -128,7 +128,7 @@ static std::vector<QString> getWords(const dive *d)
|
||||
tokenize(QString(d->buddy), res);
|
||||
tokenize(QString(d->suit), res);
|
||||
for (const tag_entry *tag = d->tag_list; tag; tag = tag->next)
|
||||
tokenize(QString(tag->tag->name), res);
|
||||
tokenize(QString::fromStdString(tag->tag->name), res);
|
||||
for (int i = 0; i < d->cylinders.nr; ++i) {
|
||||
const cylinder_t &cyl = *get_cylinder(d, i);
|
||||
tokenize(QString(cyl.type.description), res);
|
||||
|
||||
@ -1,6 +1,7 @@
|
||||
// SPDX-License-Identifier: GPL-2.0
|
||||
#include "gas.h"
|
||||
#include "pref.h"
|
||||
#include "errorhelper.h"
|
||||
#include "gettext.h"
|
||||
#include <stdio.h>
|
||||
#include <string.h>
|
||||
@ -59,7 +60,7 @@ void sanitize_gasmix(struct gasmix *mix)
|
||||
/* Sane mix? */
|
||||
if (o2 <= 1000 && he <= 1000 && o2 + he <= 1000)
|
||||
return;
|
||||
fprintf(stderr, "Odd gasmix: %u O2 %u He\n", o2, he);
|
||||
report_info("Odd gasmix: %u O2 %u He", o2, he);
|
||||
memset(mix, 0, sizeof(*mix));
|
||||
}
|
||||
|
||||
@ -117,7 +118,7 @@ int pscr_o2(const double amb_pressure, struct gasmix mix)
|
||||
* *pressures = structure for communicating o2 sensor values from and gas pressures to the calling function.
|
||||
* *mix = structure containing cylinder gas mixture information.
|
||||
* divemode = the dive mode pertaining to this point in the dive profile.
|
||||
* This function called by: calculate_gas_information_new() in profile.c; add_segment() in deco.c.
|
||||
* This function called by: calculate_gas_information_new() in profile.cpp; add_segment() in deco.cpp.
|
||||
*/
|
||||
void fill_pressures(struct gas_pressures *pressures, const double amb_pressure, struct gasmix mix, double po2, enum divemode_t divemode)
|
||||
{
|
||||
|
||||
@ -2,9 +2,9 @@
|
||||
/* gaspressures.c
|
||||
* ---------------
|
||||
* This file contains the routines to calculate the gas pressures in the cylinders.
|
||||
* The functions below support the code in profile.c.
|
||||
* The functions below support the code in profile.cpp.
|
||||
* The high-level function is populate_pressure_information(), called by function
|
||||
* create_plot_info_new() in profile.c. The other functions below are, in turn,
|
||||
* create_plot_info_new() in profile.cpp. The other functions below are, in turn,
|
||||
* called by populate_pressure_information(). The calling sequence is as follows:
|
||||
*
|
||||
* populate_pressure_information() -> calc_pressure_time()
|
||||
@ -102,8 +102,8 @@ static void dump_pr_track(int cyl, pr_track_t *track_pr)
|
||||
printf(" start %f end %f t_start %d:%02d t_end %d:%02d pt %d\n",
|
||||
mbar_to_PSI(list->start),
|
||||
mbar_to_PSI(list->end),
|
||||
FRACTION(list->t_start, 60),
|
||||
FRACTION(list->t_end, 60),
|
||||
FRACTION_TUPLE(list->t_start, 60),
|
||||
FRACTION_TUPLE(list->t_end, 60),
|
||||
list->pressure_time);
|
||||
list = list->next;
|
||||
}
|
||||
@ -362,7 +362,7 @@ static void debug_print_pressures(struct plot_info *pi)
|
||||
* pr_track_alloc structures for each cylinder. These pr_track_alloc structures ultimately allow for filling
|
||||
* the missing tank pressure values on the dive profile using the depth_pressure of the dive. To do this, it
|
||||
* calculates the summed pressure-time value for the duration of the dive and stores these in the pr_track_alloc
|
||||
* structures. This function is called by create_plot_info_new() in profile.c
|
||||
* structures. This function is called by create_plot_info_new() in profile.cpp
|
||||
*/
|
||||
void populate_pressure_information(const struct dive *dive, const struct divecomputer *dc, struct plot_info *pi, int sensor)
|
||||
{
|
||||
|
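The comment block above describes the pressure-time interpolation that fills in missing tank pressures: between two known pressures, each sample gets a share of the total drop proportional to the accumulated ambient-pressure-times-time since the last known value, since gas use scales with depth and time. A toy illustration of that idea follows; the field and function names are made up for the sketch and are not the plot_info layout.

```cpp
// Fill missing tank pressures between two known values, weighted by pressure-time.
#include <vector>

struct SampleP {
	double ambient_bar;	// ambient pressure at this sample
	int duration_s;		// time covered by this sample
	double tank_bar;	// 0.0 when the logger did not record a pressure
};

static void interpolate_pressures(std::vector<SampleP> &s, double start_bar, double end_bar)
{
	double total = 0.0;					// total pressure-time over the segment
	for (const SampleP &p : s)
		total += p.ambient_bar * p.duration_s;
	if (total <= 0.0)
		return;
	double done = 0.0;
	for (SampleP &p : s) {
		done += p.ambient_bar * p.duration_s;
		if (p.tank_bar == 0.0)				// only fill in the missing values
			p.tank_bar = start_bar + (end_bar - start_bar) * (done / total);
	}
}
```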
||||
@ -17,10 +17,13 @@
|
||||
#include <fcntl.h>
|
||||
#include <stdarg.h>
|
||||
#include <git2.h>
|
||||
#include <QString>
|
||||
#include <QRegularExpression>
|
||||
#include <QNetworkProxy>
|
||||
|
||||
#include "subsurface-string.h"
|
||||
#include "format.h"
|
||||
#include "membuffer.h"
|
||||
#include "strndup.h"
|
||||
#include "qthelper.h"
|
||||
#include "file.h"
|
||||
#include "errorhelper.h"
|
||||
@ -117,12 +120,20 @@ static int transfer_progress_cb(const git_transfer_progress *stats, void *)
|
||||
// the initial push to sync the repos is mapped to 10% of overall progress
|
||||
static int push_transfer_progress_cb(unsigned int current, unsigned int total, size_t, void *)
|
||||
{
|
||||
char buf[80];
|
||||
snprintf(buf, sizeof(buf), translate("gettextFromC", "Transfer to storage (%d/%d)"), current, total);
|
||||
return git_storage_update_progress(buf);
|
||||
std::string buf = casprintf_loc(translate("gettextFromC", "Transfer to storage (%d/%d)"), current, total);
|
||||
return git_storage_update_progress(buf.c_str());
|
||||
}
|
||||
|
||||
extern "C" char *get_local_dir(const char *url, const char *branch)
|
||||
std::string normalize_cloud_name(const std::string &remote_in)
|
||||
{
|
||||
// replace ssrf-cloud-XX.subsurface... names with cloud.subsurface... names
|
||||
// that trailing '/' is to match old code
|
||||
QString ri = QString::fromStdString(remote_in);
|
||||
ri.replace(QRegularExpression(CLOUD_HOST_PATTERN), CLOUD_HOST_GENERIC "/");
|
||||
return ri.toStdString();
|
||||
}
|
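normalize_cloud_name() only has to keep the hash input stable across the regional ssrf-cloud-XX host names. An illustrative version is sketched below with an assumed pattern and host; the real values come from the CLOUD_HOST_PATTERN and CLOUD_HOST_GENERIC macros, which are not shown in this diff.

```cpp
// Hedged illustration only: rewrite region-specific cloud hosts to the generic name
// so the SHA1-derived cache directory stays the same regardless of server selection.
#include <QRegularExpression>
#include <QString>

static QString normalize_cloud_host_sketch(QString url)
{
	// pattern and replacement are assumptions for this sketch; the real code
	// also appends a trailing '/' to match directory names from older versions
	static const QRegularExpression re("ssrf-cloud-[a-z0-9]+\\.subsurface-divelog\\.org");
	url.replace(re, "cloud.subsurface-divelog.org");
	return url;
}
```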
||||
|
||||
std::string get_local_dir(const std::string &url, const std::string &branch)
|
||||
{
|
||||
SHA_CTX ctx;
|
||||
unsigned char hash[20];
|
||||
@ -133,9 +144,6 @@ extern "C" char *get_local_dir(const char *url, const char *branch)
|
||||
// which server to pick changed, or because the user is on a different continent),
|
||||
// then the hash and therefore the local directory would change. To prevent that
|
||||
// from happening, normalize the cloud string to always use the old default name.
|
||||
// That's trivial with QString operations and painful to do right in plain C, so
|
||||
// let's be lazy and call a C++ helper function
|
||||
// just remember to free the string we get back
|
||||
std::string remote = normalize_cloud_name(url);
|
||||
|
||||
// That zero-byte update is so that we don't get hash
|
||||
@ -143,19 +151,18 @@ extern "C" char *get_local_dir(const char *url, const char *branch)
|
||||
SHA1_Init(&ctx);
|
||||
SHA1_Update(&ctx, remote.c_str(), remote.size());
|
||||
SHA1_Update(&ctx, "", 1);
|
||||
SHA1_Update(&ctx, branch, strlen(branch));
|
||||
SHA1_Update(&ctx, branch.c_str(), branch.size());
|
||||
SHA1_Final(hash, &ctx);
|
||||
return format_string("%s/cloudstorage/%02x%02x%02x%02x%02x%02x%02x%02x",
|
||||
return format_string_std("%s/cloudstorage/%02x%02x%02x%02x%02x%02x%02x%02x",
|
||||
system_default_directory(),
|
||||
hash[0], hash[1], hash[2], hash[3],
|
||||
hash[4], hash[5], hash[6], hash[7]);
|
||||
}
|
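get_local_dir() derives the cache directory name from a SHA1 over "url + NUL + branch" and keeps the first eight hash bytes as hex. A compact sketch of the same scheme using OpenSSL's one-shot SHA1(); the base directory is passed in as a parameter here, whereas the diffed code takes it from system_default_directory().

```cpp
// Derive a stable per-repo cache directory from the remote URL and branch name.
#include <openssl/sha.h>
#include <cstdio>
#include <string>

static std::string cache_dir_for(const std::string &base, const std::string &url, const std::string &branch)
{
	std::string input = url;
	input.push_back('\0');		// separator so "ab"+"c" hashes differently from "a"+"bc"
	input += branch;

	unsigned char hash[SHA_DIGEST_LENGTH];
	SHA1(reinterpret_cast<const unsigned char *>(input.data()), input.size(), hash);

	char hex[17];
	snprintf(hex, sizeof(hex), "%02x%02x%02x%02x%02x%02x%02x%02x",
		 hash[0], hash[1], hash[2], hash[3], hash[4], hash[5], hash[6], hash[7]);
	return base + "/cloudstorage/" + hex;
}
```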
||||
|
||||
static char *move_local_cache(struct git_info *info)
|
||||
static std::string move_local_cache(struct git_info *info)
|
||||
{
|
||||
char *old_path = get_local_dir(info->url, info->branch);
|
||||
char *new_path = move_away(old_path);
|
||||
free(old_path);
|
||||
std::string old_path = get_local_dir(info->url, info->branch);
|
||||
std::string new_path = move_away(old_path);
|
||||
return new_path;
|
||||
}
|
||||
|
||||
@ -248,15 +255,12 @@ extern "C" int credential_ssh_cb(git_cred **out,
|
||||
|
||||
// TODO: We need a way to differentiate between password and private key authentication
|
||||
if (allowed_types & GIT_CREDTYPE_SSH_KEY) {
|
||||
char *priv_key = format_string("%s/%s", system_default_directory(), "ssrf_remote.key");
|
||||
if (!access(priv_key, F_OK)) {
|
||||
std::string priv_key = std::string(system_default_directory()) + "/ssrf_remote.key";
|
||||
if (!access(priv_key.c_str(), F_OK)) {
|
||||
if (exceeded_auth_attempts())
|
||||
return GIT_EUSER;
|
||||
int ret = git_cred_ssh_key_new(out, username, NULL, priv_key, passphrase);
|
||||
free(priv_key);
|
||||
return ret;
|
||||
return git_cred_ssh_key_new(out, username, NULL, priv_key.c_str(), passphrase);
|
||||
}
|
||||
free(priv_key);
|
||||
}
|
||||
|
||||
if (allowed_types & GIT_CREDTYPE_USERPASS_PLAINTEXT) {
|
||||
@ -346,7 +350,7 @@ static int try_to_git_merge(struct git_info *info, git_reference **local_p, git_
|
||||
git_commit *local_commit, *remote_commit, *base_commit;
|
||||
git_index *merged_index;
|
||||
git_merge_options merge_options;
|
||||
struct membuffer msg = { 0, 0, NULL};
|
||||
struct membufferpp msg;
|
||||
|
||||
if (verbose) {
|
||||
char outlocal[41], outremote[41];
|
||||
@ -450,7 +454,6 @@ static int try_to_git_merge(struct git_info *info, git_reference **local_p, git_
|
||||
git_signature_free(author);
|
||||
if (verbose)
|
||||
report_info("git storage: successfully merged repositories");
|
||||
free_buffer(&msg);
|
||||
return 0;
|
||||
}
|
||||
|
||||
@ -458,7 +461,6 @@ diverged_error:
|
||||
return report_error("%s", translate("gettextFromC", "Remote storage and local data diverged"));
|
||||
|
||||
write_error:
|
||||
free_buffer(&msg);
|
||||
return report_error(translate("gettextFromC", "Remote storage and local data diverged. Error: writing the data failed (%s)"), giterr_last()->message);
|
||||
}
|
||||
|
||||
@ -467,11 +469,10 @@ write_error:
|
||||
// and ask them to retry the operation (which will then refresh the data from the cloud server)
|
||||
static int cleanup_local_cache(struct git_info *info)
|
||||
{
|
||||
char *backup_path = move_local_cache(info);
|
||||
report_info("git storage: problems with local cache, moved to %s", backup_path);
|
||||
std::string backup_path = move_local_cache(info);
|
||||
report_info("git storage: problems with local cache, moved to %s", backup_path.c_str());
|
||||
report_error("%s", translate("gettextFromC", "Problems with local cache of Subsurface cloud data"));
|
||||
report_error(translate("gettextFromC", "Moved cache data to %s. Please try the operation again."), backup_path);
|
||||
free(backup_path);
|
||||
report_error(translate("gettextFromC", "Moved cache data to %s. Please try the operation again."), backup_path.c_str());
|
||||
return -1;
|
||||
}
|
||||
|
||||
@ -576,17 +577,17 @@ static int check_remote_status(struct git_info *info, git_remote *origin)
|
||||
if (verbose)
|
||||
report_info("git storage: check remote status\n");
|
||||
|
||||
if (git_branch_lookup(&local_ref, info->repo, info->branch, GIT_BRANCH_LOCAL)) {
|
||||
report_info("git storage: branch %s is missing in local repo", info->branch);
|
||||
if (git_branch_lookup(&local_ref, info->repo, info->branch.c_str(), GIT_BRANCH_LOCAL)) {
|
||||
report_info("git storage: branch %s is missing in local repo", info->branch.c_str());
|
||||
if (info->is_subsurface_cloud)
|
||||
return cleanup_local_cache(info);
|
||||
else
|
||||
return report_error("Git cache branch %s no longer exists", info->branch);
|
||||
return report_error("Git cache branch %s no longer exists", info->branch.c_str());
|
||||
}
|
||||
if (git_branch_upstream(&remote_ref, local_ref)) {
|
||||
/* so there is no upstream branch for our branch; that's a problem.
|
||||
* let's push our branch */
|
||||
report_info("git storage: branch %s is missing in remote, pushing branch", info->branch);
|
||||
report_info("git storage: branch %s is missing in remote, pushing branch", info->branch.c_str());
|
||||
git_strarray refspec;
|
||||
git_reference_list(&refspec, info->repo);
|
||||
git_push_options opts = GIT_PUSH_OPTIONS_INIT;
|
||||
@ -608,62 +609,69 @@ static int check_remote_status(struct git_info *info, git_remote *origin)
|
||||
return error;
|
||||
}
|
||||
|
||||
static std::string getProxyString()
|
||||
{
|
||||
if (prefs.proxy_type == QNetworkProxy::HttpProxy) {
|
||||
if (prefs.proxy_auth)
|
||||
return format_string_std("http://%s:%s@%s:%d", prefs.proxy_user, prefs.proxy_pass,
|
||||
prefs.proxy_host, prefs.proxy_port);
|
||||
else
|
||||
return format_string_std("http://%s:%d", prefs.proxy_host, prefs.proxy_port);
|
||||
}
|
||||
return std::string();
|
||||
}
|
||||
|
||||
/* this is (so far) only used by the git storage tests to remove a remote branch
|
||||
* it will print out errors, but not return an error (as this isn't a function that
|
||||
* we test as part of the tests, it's a helper to not leave loads of dead branches on
|
||||
* the server)
|
||||
*/
|
||||
extern "C" void delete_remote_branch(git_repository *repo, const char *remote, const char *branch)
|
||||
void delete_remote_branch(git_repository *repo, const std::string &remote, const std::string &branch)
|
||||
{
|
||||
int error;
|
||||
char *proxy_string;
|
||||
git_remote *origin;
|
||||
git_config *conf;
|
||||
|
||||
/* set up the config and proxy information in order to connect to the server */
|
||||
git_repository_config(&conf, repo);
|
||||
if (getProxyString(&proxy_string)) {
|
||||
git_config_set_string(conf, "http.proxy", proxy_string);
|
||||
free(proxy_string);
|
||||
std::string proxy_string = getProxyString();
|
||||
if (!proxy_string.empty()) {
|
||||
git_config_set_string(conf, "http.proxy", proxy_string.c_str());
|
||||
} else {
|
||||
git_config_delete_entry(conf, "http.proxy");
|
||||
}
|
||||
if (git_remote_lookup(&origin, repo, "origin")) {
|
||||
report_info("git storage: repository '%s' origin lookup failed (%s)", remote, giterr_last() ? giterr_last()->message : "(unspecified)");
|
||||
report_info("git storage: repository '%s' origin lookup failed (%s)", remote.c_str(), giterr_last() ? giterr_last()->message : "(unspecified)");
|
||||
return;
|
||||
}
|
||||
/* fetch the remote state */
|
||||
git_fetch_options f_opts = GIT_FETCH_OPTIONS_INIT;
|
||||
auth_attempt = 0;
|
||||
f_opts.callbacks.credentials = credential_https_cb;
|
||||
error = git_remote_fetch(origin, NULL, &f_opts, NULL);
|
||||
if (error) {
|
||||
if (git_remote_fetch(origin, NULL, &f_opts, NULL)) {
|
||||
report_info("git storage: remote fetch failed (%s)\n", giterr_last() ? giterr_last()->message : "authentication failed");
|
||||
return;
|
||||
}
|
||||
/* delete the remote branch by pushing to ":refs/heads/<branch>" */
|
||||
git_strarray refspec;
|
||||
char *branch_ref = format_string(":refs/heads/%s", branch);
|
||||
std::string branch_ref = std::string(":refs/heads/") + branch;
|
||||
char *dummy = branch_ref.data();
|
||||
refspec.count = 1;
|
||||
refspec.strings = &branch_ref;
|
||||
refspec.strings = &dummy;
|
||||
git_push_options p_opts = GIT_PUSH_OPTIONS_INIT;
|
||||
auth_attempt = 0;
|
||||
p_opts.callbacks.credentials = credential_https_cb;
|
||||
error = git_remote_push(origin, &refspec, &p_opts);
|
||||
free(branch_ref);
|
||||
if (error) {
|
||||
report_info("git storage: unable to delete branch '%s'", branch);
|
||||
if (git_remote_push(origin, &refspec, &p_opts)) {
|
||||
report_info("git storage: unable to delete branch '%s'", branch.c_str());
|
||||
report_info("git storage: error was (%s)\n", giterr_last() ? giterr_last()->message : "(unspecified)");
|
||||
}
|
||||
git_remote_free(origin);
|
||||
return;
|
||||
}
|
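As the comment above notes, delete_remote_branch() removes a branch on the server by pushing a refspec with an empty source: ":refs/heads/<branch>". A trimmed sketch of that idiom with the same libgit2 calls used in this hunk; credentials, proxy setup and error reporting are omitted.

```cpp
// Delete a branch on "origin" by pushing an empty-source refspec.
#include <git2.h>
#include <string>

static int delete_branch_on_origin(git_repository *repo, const std::string &branch)
{
	git_remote *origin = nullptr;
	if (git_remote_lookup(&origin, repo, "origin"))
		return -1;

	std::string refspec_str = ":refs/heads/" + branch;	// empty source means "delete"
	char *refspec_cstr = const_cast<char *>(refspec_str.c_str());
	git_strarray refspecs;
	refspecs.strings = &refspec_cstr;
	refspecs.count = 1;

	git_push_options opts = GIT_PUSH_OPTIONS_INIT;
	int error = git_remote_push(origin, &refspecs, &opts);
	git_remote_free(origin);
	return error;
}
```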
||||
|
||||
extern "C" int sync_with_remote(struct git_info *info)
|
||||
int sync_with_remote(struct git_info *info)
|
||||
{
|
||||
int error;
|
||||
git_remote *origin;
|
||||
char *proxy_string;
|
||||
git_config *conf;
|
||||
|
||||
if (git_local_only) {
|
||||
@ -672,14 +680,14 @@ extern "C" int sync_with_remote(struct git_info *info)
|
||||
return 0;
|
||||
}
|
||||
if (verbose)
|
||||
report_info("git storage: sync with remote %s[%s]\n", info->url, info->branch);
|
||||
report_info("git storage: sync with remote %s[%s]\n", info->url.c_str(), info->branch.c_str());
|
||||
git_storage_update_progress(translate("gettextFromC", "Sync with cloud storage"));
|
||||
git_repository_config(&conf, info->repo);
|
||||
if (info->transport == RT_HTTPS && getProxyString(&proxy_string)) {
|
||||
std::string proxy_string = getProxyString();
|
||||
if (info->transport == RT_HTTPS && !proxy_string.empty()) {
|
||||
if (verbose)
|
||||
report_info("git storage: set proxy to \"%s\"\n", proxy_string);
|
||||
git_config_set_string(conf, "http.proxy", proxy_string);
|
||||
free(proxy_string);
|
||||
report_info("git storage: set proxy to \"%s\"\n", proxy_string.c_str());
|
||||
git_config_set_string(conf, "http.proxy", proxy_string.c_str());
|
||||
} else {
|
||||
if (verbose)
|
||||
report_info("git storage: delete proxy setting\n");
|
||||
@ -693,9 +701,9 @@ extern "C" int sync_with_remote(struct git_info *info)
|
||||
error = git_remote_lookup(&origin, info->repo, "origin");
|
||||
if (error) {
|
||||
const char *msg = giterr_last()->message;
|
||||
report_info("git storage: repo %s origin lookup failed with: %s", info->url, msg);
|
||||
report_info("git storage: repo %s origin lookup failed with: %s", info->url.c_str(), msg);
|
||||
if (!info->is_subsurface_cloud)
|
||||
report_error("Repository '%s' origin lookup failed (%s)", info->url, msg);
|
||||
report_error("Repository '%s' origin lookup failed (%s)", info->url.c_str(), msg);
|
||||
return 0;
|
||||
}
|
||||
|
||||
@ -726,7 +734,7 @@ extern "C" int sync_with_remote(struct git_info *info)
|
||||
if (info->is_subsurface_cloud)
|
||||
report_error("Cannot sync with cloud server, working with offline copy");
|
||||
else
|
||||
report_error("Unable to fetch remote '%s'", info->url);
|
||||
report_error("Unable to fetch remote '%s'", info->url.c_str());
|
||||
// If we returned GIT_EUSER during authentication, giterr_last() returns NULL
|
||||
report_info("git storage: remote fetch failed (%s)\n", giterr_last() ? giterr_last()->message : "authentication failed");
|
||||
// Since we failed to sync with online repository, enter offline mode
|
||||
@ -748,18 +756,17 @@ static bool update_local_repo(struct git_info *info)
|
||||
if (!git_repository_head(&head, info->repo)) {
|
||||
const char *name;
|
||||
if (!git_branch_name(&name, head)) {
|
||||
if (strcmp(name, info->branch)) {
|
||||
char *branchref = format_string("refs/heads/%s", info->branch);
|
||||
report_info("git storage: setting cache branch from '%s' to '%s'", name, info->branch);
|
||||
git_repository_set_head(info->repo, branchref);
|
||||
free(branchref);
|
||||
if (info->branch != name) {
|
||||
std::string branchref = "refs/heads/" + info->branch;
|
||||
report_info("git storage: setting cache branch from '%s' to '%s'", name, info->branch.c_str());
|
||||
git_repository_set_head(info->repo, branchref.c_str());
|
||||
}
|
||||
}
|
||||
git_reference_free(head);
|
||||
}
|
||||
/* make sure we have the correct origin - the cloud server URL could have changed */
|
||||
if (git_remote_set_url(info->repo, "origin", info->url)) {
|
||||
report_info("git storage: failed to update origin to '%s'", info->url);
|
||||
if (git_remote_set_url(info->repo, "origin", info->url.c_str())) {
|
||||
report_info("git storage: failed to update origin to '%s'", info->url.c_str());
|
||||
return false;
|
||||
}
|
||||
|
||||
@ -771,7 +778,6 @@ static bool update_local_repo(struct git_info *info)
|
||||
|
||||
static int repository_create_cb(git_repository **out, const char *path, int bare, void *)
|
||||
{
|
||||
char *proxy_string;
|
||||
git_config *conf;
|
||||
|
||||
int ret = git_repository_init(out, path, bare);
|
||||
@ -782,11 +788,11 @@ static int repository_create_cb(git_repository **out, const char *path, int bare
|
||||
}
|
||||
|
||||
git_repository_config(&conf, *out);
|
||||
if (getProxyString(&proxy_string)) {
|
||||
std::string proxy_string = getProxyString();
|
||||
if (!proxy_string.empty()) {
|
||||
if (verbose)
|
||||
report_info("git storage: set proxy to \"%s\"\n", proxy_string);
|
||||
git_config_set_string(conf, "http.proxy", proxy_string);
|
||||
free(proxy_string);
|
||||
report_info("git storage: set proxy to \"%s\"\n", proxy_string.c_str());
|
||||
git_config_set_string(conf, "http.proxy", proxy_string.c_str());
|
||||
} else {
|
||||
if (verbose)
|
||||
report_info("git storage: delete proxy setting\n");
|
||||
@ -800,34 +806,30 @@ static int repository_create_cb(git_repository **out, const char *path, int bare
|
||||
static bool create_and_push_remote(struct git_info *info)
|
||||
{
|
||||
git_config *conf;
|
||||
char *variable_name, *head;
|
||||
|
||||
if (verbose)
|
||||
report_info("git storage: create and push remote\n");
|
||||
|
||||
/* first make sure the directory for the local cache exists */
|
||||
subsurface_mkdir(info->localdir);
|
||||
subsurface_mkdir(info->localdir.c_str());
|
||||
|
||||
head = format_string("refs/heads/%s", info->branch);
|
||||
std::string head = "refs/heads/" + info->branch;
|
||||
|
||||
/* set up the origin to point to our remote */
|
||||
git_repository_init_options init_opts = GIT_REPOSITORY_INIT_OPTIONS_INIT;
|
||||
init_opts.origin_url = info->url;
|
||||
init_opts.initial_head = head;
|
||||
init_opts.origin_url = info->url.c_str();
|
||||
init_opts.initial_head = head.c_str();
|
||||
|
||||
/* now initialize the repository with these options */
|
||||
git_repository_init_ext(&info->repo, info->localdir, &init_opts);
|
||||
git_repository_init_ext(&info->repo, info->localdir.c_str(), &init_opts);
|
||||
|
||||
/* create a config so we can set the remote tracking branch */
|
||||
git_repository_config(&conf, info->repo);
|
||||
variable_name = format_string("branch.%s.remote", info->branch);
|
||||
git_config_set_string(conf, variable_name, "origin");
|
||||
free(variable_name);
|
||||
std::string variable_name = "branch." + info->branch + ".remote";
|
||||
git_config_set_string(conf, variable_name.c_str(), "origin");
|
||||
|
||||
variable_name = format_string("branch.%s.merge", info->branch);
|
||||
git_config_set_string(conf, variable_name, head);
|
||||
free(head);
|
||||
free(variable_name);
|
||||
variable_name = "branch." + info->branch + ".merge";
|
||||
git_config_set_string(conf, variable_name.c_str(), head.c_str());
|
||||
|
||||
/* finally create an empty commit and push it to the remote */
|
||||
if (do_git_save(info, false, true))
|
||||
@ -853,18 +855,18 @@ static bool create_local_repo(struct git_info *info)
|
||||
opts.repository_cb = repository_create_cb;
|
||||
opts.fetch_opts.callbacks.certificate_check = certificate_check_cb;
|
||||
|
||||
opts.checkout_branch = info->branch;
|
||||
opts.checkout_branch = info->branch.c_str();
|
||||
if (info->is_subsurface_cloud && !canReachCloudServer(info)) {
|
||||
report_info("git storage: cannot reach remote server");
|
||||
return false;
|
||||
}
|
||||
if (verbose > 1)
|
||||
report_info("git storage: calling git_clone()\n");
|
||||
error = git_clone(&info->repo, info->url, info->localdir, &opts);
|
||||
error = git_clone(&info->repo, info->url.c_str(), info->localdir.c_str(), &opts);
|
||||
if (verbose > 1)
|
||||
report_info("git storage: returned from git_clone() with return value %d\n", error);
|
||||
if (error) {
|
||||
report_info("git storage: clone of %s failed", info->url);
|
||||
report_info("git storage: clone of %s failed", info->url.c_str());
|
||||
const char *msg = "";
|
||||
if (giterr_last()) {
|
||||
msg = giterr_last()->message;
|
||||
@ -872,9 +874,9 @@ static bool create_local_repo(struct git_info *info)
|
||||
} else {
|
||||
report_info("git storage: giterr_last() is null\n");
|
||||
}
|
||||
char *pattern = format_string("reference 'refs/remotes/origin/%s' not found", info->branch);
|
||||
std::string pattern = format_string_std("reference 'refs/remotes/origin/%s' not found", info->branch.c_str());
|
||||
// it seems that we sometimes get 'Reference' and sometimes 'reference'
|
||||
if (includes_string_caseinsensitive(msg, pattern)) {
|
||||
if (includes_string_caseinsensitive(msg, pattern.c_str())) {
|
||||
/* we're trying to open the remote branch that corresponds
|
||||
* to our cloud storage and the branch doesn't exist.
|
||||
* So we need to create the branch and push it to the remote */
|
||||
@ -887,19 +889,18 @@ static bool create_local_repo(struct git_info *info)
|
||||
report_error("%s", translate("gettextFromC", "Error connecting to Subsurface cloud storage"));
|
||||
#endif
|
||||
} else {
|
||||
report_error(translate("gettextFromC", "git clone of %s failed (%s)"), info->url, msg);
|
||||
report_error(translate("gettextFromC", "git clone of %s failed (%s)"), info->url.c_str(), msg);
|
||||
}
|
||||
free(pattern);
|
||||
}
|
||||
return !error;
|
||||
}
|
||||
|
||||
static enum remote_transport url_to_remote_transport(const char *remote)
|
||||
static enum remote_transport url_to_remote_transport(const std::string &remote)
|
||||
{
|
||||
/* figure out the remote transport */
|
||||
if (strncmp(remote, "ssh://", 6) == 0)
|
||||
if (starts_with(remote, "ssh://"))
|
||||
return RT_SSH;
|
||||
else if (strncmp(remote, "https://", 8) == 0)
|
||||
else if (starts_with(remote.c_str(), "https://"))
|
||||
return RT_HTTPS;
|
||||
else
|
||||
return RT_OTHER;
|
||||
@ -910,24 +911,24 @@ static bool get_remote_repo(struct git_info *info)
|
||||
struct stat st;
|
||||
|
||||
if (verbose > 1) {
|
||||
report_info("git storage: accessing %s\n", info->url);
|
||||
report_info("git storage: accessing %s\n", info->url.c_str());
|
||||
}
|
||||
git_storage_update_progress(translate("gettextFromC", "Synchronising data file"));
|
||||
/* Do we already have a local cache? */
|
||||
if (!subsurface_stat(info->localdir, &st)) {
|
||||
if (!subsurface_stat(info->localdir.c_str(), &st)) {
|
||||
int error;
|
||||
|
||||
if (verbose)
|
||||
report_info("git storage: update local repo\n");
|
||||
|
||||
error = git_repository_open(&info->repo, info->localdir);
|
||||
error = git_repository_open(&info->repo, info->localdir.c_str());
|
||||
if (error) {
|
||||
const char *msg = giterr_last()->message;
|
||||
report_info("git storage: unable to open local cache at %s: %s", info->localdir, msg);
|
||||
report_info("git storage: unable to open local cache at %s: %s", info->localdir.c_str(), msg);
|
||||
if (info->is_subsurface_cloud)
|
||||
(void)cleanup_local_cache(info);
|
||||
else
|
||||
report_error("Unable to open git cache repository at %s: %s", info->localdir, msg);
|
||||
report_error("Unable to open git cache repository at %s: %s", info->localdir.c_str(), msg);
|
||||
return false;
|
||||
}
|
||||
|
||||
@ -955,17 +956,17 @@ static bool get_remote_repo(struct git_info *info)
|
||||
* Remove the user name from the url if it exists, and
|
||||
* save it in 'info->username'.
|
||||
*/
|
||||
static void extract_username(struct git_info *info, char *url)
|
||||
std::string extract_username(struct git_info *info, const std::string &url)
|
||||
{
|
||||
char c;
|
||||
char *p = url;
|
||||
const char *p = url.c_str();
|
||||
|
||||
while ((c = *p++) >= 'a' && c <= 'z')
|
||||
/* nothing */;
|
||||
if (c != ':')
|
||||
return;
|
||||
return url;
|
||||
if (*p++ != '/' || *p++ != '/')
|
||||
return;
|
||||
return url;
|
||||
|
||||
/*
|
||||
* Ok, we found "[a-z]*://" and we think we have a real
|
||||
@ -974,38 +975,38 @@ static void extract_username(struct git_info *info, char *url)
|
||||
*/
|
||||
info->transport = url_to_remote_transport(url);
|
||||
|
||||
char *at = strchr(p, '@');
|
||||
const char *at = strchr(p, '@');
|
||||
if (!at)
|
||||
return;
|
||||
return url;
|
||||
|
||||
/* was this the @ that denotes an account? that means it was before the
|
||||
* first '/' after the protocol:// - so let's find a '/' after that and compare */
|
||||
char *slash = strchr(p, '/');
|
||||
const char *slash = strchr(p, '/');
|
||||
if (!slash || at > slash)
|
||||
return;
|
||||
return url;
|
||||
|
||||
/* grab the part between "protocol://" and "@" as encoded email address
|
||||
* (that's our username) and move the rest of the URL forward, remembering
|
||||
* to copy the closing NUL as well */
|
||||
info->username = strndup(p, at - p);
|
||||
memmove(p, at + 1, strlen(at + 1) + 1);
|
||||
info->username = std::string(p, at - p);
|
||||
|
||||
/*
|
||||
* Ugly, ugly. Parsing the remote repo user name also sets
|
||||
* it in the preferences. We should do this somewhere else!
|
||||
*/
|
||||
prefs.cloud_storage_email_encoded = strdup(info->username);
|
||||
prefs.cloud_storage_email_encoded = strdup(info->username.c_str());
|
||||
|
||||
return url.substr(at + 1 - url.c_str());
|
||||
}
|
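extract_username() now returns the URL with the credential part stripped instead of editing the buffer in place. The same parsing rule, restated with std::string primitives, is sketched below purely for illustration; it is not the Subsurface implementation.

```cpp
// Split "proto://user@host/..." into the user name and the URL without it.
// Returns an empty user and the unchanged URL when there is nothing to strip.
#include <string>
#include <utility>

static std::pair<std::string, std::string> split_user_from_url(const std::string &url)
{
	size_t scheme = url.find("://");
	if (scheme == std::string::npos)
		return { std::string(), url };			// no scheme, nothing to strip
	size_t host_start = scheme + 3;
	size_t slash = url.find('/', host_start);		// end of the authority part
	size_t at = url.find('@', host_start);
	if (at == std::string::npos || (slash != std::string::npos && at > slash))
		return { std::string(), url };			// '@' is not in the authority
	std::string user = url.substr(host_start, at - host_start);
	std::string stripped = url.substr(0, host_start) + url.substr(at + 1);
	return { user, stripped };
}

// e.g. split_user_from_url("https://me%40mail@cloud.host/repo")
//	-> { "me%40mail", "https://cloud.host/repo" }
```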
||||
|
||||
extern "C" void cleanup_git_info(struct git_info *info)
|
||||
git_info::git_info() : repo(nullptr), is_subsurface_cloud(0), transport(RT_LOCAL)
|
||||
{
|
||||
if (info->repo)
|
||||
git_repository_free(info->repo);
|
||||
free((void *)info->url);
|
||||
free((void *)info->branch);
|
||||
free((void *)info->username);
|
||||
free((void *)info->localdir);
|
||||
memset(info, 0, sizeof(*info));
|
||||
}
|
||||
|
||||
git_info::~git_info()
|
||||
{
|
||||
if (repo)
|
||||
git_repository_free(repo);
|
||||
}
|
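With git_info gaining a constructor and this destructor, the struct owns its strings and the git_repository handle, so call sites get scope-based cleanup instead of calling cleanup_git_info(). A hedged usage sketch, using only functions declared in the header later in this diff:

```cpp
// Scope-based cleanup: ~git_info() frees the repository handle on return.
#include "divelog.h"
#include "git-access.h"

static int load_from_git(const char *filename, struct divelog *log)
{
	git_info info;						// repo starts out as nullptr
	if (!is_git_repository(filename, &info) || !open_git_repository(&info))
		return -1;
	return git_load_dives(&info, log);			// no cleanup_git_info() needed
}
```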
||||
|
||||
/*
|
||||
@ -1018,16 +1019,14 @@ extern "C" void cleanup_git_info(struct git_info *info)
|
||||
* https://host/repo[branch]
|
||||
* file://repo[branch]
|
||||
*/
|
||||
extern "C" bool is_git_repository(const char *filename, struct git_info *info)
|
||||
bool is_git_repository(const char *filename, struct git_info *info)
|
||||
{
|
||||
int flen, blen;
|
||||
int offset = 1;
|
||||
char *url, *branch;
|
||||
|
||||
/* we are looking at a new potential remote, but we haven't synced with it */
|
||||
git_remote_sync_successful = false;
|
||||
|
||||
memset(info, 0, sizeof(*info));
|
||||
info->transport = RT_LOCAL;
|
||||
flen = strlen(filename);
|
||||
if (!flen || filename[--flen] != ']')
|
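The comment block above this hunk documents the repo[branch] filename convention that is_git_repository() accepts. A small illustrative helper (not the Subsurface parser) showing how such a name splits:

```cpp
// Split "path[branch]" into its path and branch parts; no suffix means no branch.
#include <string>
#include <utility>

static std::pair<std::string, std::string> split_repo_and_branch(const std::string &filename)
{
	if (filename.empty() || filename.back() != ']')
		return { filename, std::string() };		// no "[branch]" suffix
	size_t open = filename.rfind('[');
	if (open == std::string::npos)
		return { filename, std::string() };		// unbalanced, treat as a plain path
	return { filename.substr(0, open),
		 filename.substr(open + 1, filename.size() - open - 2) };
}

// e.g. split_repo_and_branch("https://host/repo[main]") -> { "https://host/repo", "main" }
```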
||||
@ -1070,11 +1069,11 @@ extern "C" bool is_git_repository(const char *filename, struct git_info *info)
|
||||
* The actual git reading/writing routines can use this
|
||||
* to generate proper error messages.
|
||||
*/
|
||||
url = format_string("%.*s", flen, filename);
|
||||
branch = format_string("%.*s", blen, filename + flen + offset);
|
||||
std::string url(filename, flen);
|
||||
std::string branch(filename + flen + offset, blen);
|
||||
|
||||
/* Extract the username from the url string */
|
||||
extract_username(info, url);
|
||||
url = extract_username(info, url);
|
||||
|
||||
info->url = url;
|
||||
info->branch = branch;
|
||||
@ -1099,10 +1098,10 @@ extern "C" bool is_git_repository(const char *filename, struct git_info *info)
|
||||
*/
|
||||
switch (info->transport) {
|
||||
case RT_LOCAL:
|
||||
info->localdir = strdup(url);
|
||||
info->localdir = url;
|
||||
break;
|
||||
default:
|
||||
info->localdir = get_local_dir(info->url, info->branch);
|
||||
info->localdir = get_local_dir(info->url.c_str(), info->branch).c_str();
|
||||
break;
|
||||
}
|
||||
|
||||
@ -1112,19 +1111,19 @@ extern "C" bool is_git_repository(const char *filename, struct git_info *info)
|
||||
*
|
||||
* This is used to create more user friendly error message and warnings.
|
||||
*/
|
||||
info->is_subsurface_cloud = (strstr(info->url, prefs.cloud_base_url) != NULL);
|
||||
info->is_subsurface_cloud = (strstr(info->url.c_str(), prefs.cloud_base_url) != NULL);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
extern "C" bool open_git_repository(struct git_info *info)
|
||||
bool open_git_repository(struct git_info *info)
|
||||
{
|
||||
/*
|
||||
* If the repository is local, just open it. There's nothing
|
||||
* else to do.
|
||||
*/
|
||||
if (info->transport == RT_LOCAL) {
|
||||
const char *url = info->localdir;
|
||||
const char *url = info->localdir.c_str();
|
||||
|
||||
if (git_repository_open(&info->repo, url)) {
|
||||
if (verbose)
|
||||
@ -1145,15 +1144,13 @@ extern "C" bool open_git_repository(struct git_info *info)
|
||||
return get_remote_repo(info);
|
||||
}
|
||||
|
||||
extern "C" int git_create_local_repo(const char *filename)
|
||||
int git_create_local_repo(const std::string &filename)
|
||||
{
|
||||
git_repository *repo;
|
||||
char *path = strdup(filename);
|
||||
char *branch = strchr(path, '[');
|
||||
if (branch)
|
||||
*branch = '\0';
|
||||
int ret = git_repository_init(&repo, path, false);
|
||||
free(path);
|
||||
|
||||
auto idx = filename.find('[');
|
||||
std::string path = filename.substr(0, idx);
|
||||
int ret = git_repository_init(&repo, path.c_str(), false);
|
||||
if (ret != 0)
|
||||
(void)report_error("Create local repo failed with error code %d", ret);
|
||||
git_repository_free(repo);
|
||||
|
||||
@ -22,20 +22,38 @@ extern "C" {
|
||||
|
||||
enum remote_transport { RT_LOCAL, RT_HTTPS, RT_SSH, RT_OTHER };
|
||||
|
||||
extern bool git_local_only;
|
||||
extern bool git_remote_sync_successful;
|
||||
extern void clear_git_id(void);
|
||||
extern void set_git_id(const struct git_oid *);
|
||||
void set_git_update_cb(int(*)(const char *));
|
||||
int git_storage_update_progress(const char *text);
|
||||
int get_authorship(git_repository *repo, git_signature **authorp);
|
||||
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
|
||||
#include <string>
|
||||
|
||||
struct git_oid;
|
||||
struct git_repository;
|
||||
struct divelog;
|
||||
|
||||
struct git_info {
|
||||
const char *url;
|
||||
const char *branch;
|
||||
const char *username;
|
||||
const char *localdir;
|
||||
std::string url;
|
||||
std::string branch;
|
||||
std::string username;
|
||||
std::string localdir;
|
||||
struct git_repository *repo;
|
||||
unsigned is_subsurface_cloud:1;
|
||||
enum remote_transport transport;
|
||||
git_info();
|
||||
~git_info();
|
||||
};
|
||||
|
||||
extern std::string saved_git_id;
|
||||
extern std::string get_sha(git_repository *repo, const std::string &branch);
|
||||
extern std::string get_local_dir(const std::string &, const std::string &);
|
||||
extern bool is_git_repository(const char *filename, struct git_info *info);
|
||||
extern bool open_git_repository(struct git_info *info);
|
||||
extern bool remote_repo_uptodate(const char *filename, struct git_info *info);
|
||||
@ -43,23 +61,7 @@ extern int sync_with_remote(struct git_info *);
|
||||
extern int git_save_dives(struct git_info *, bool select_only);
|
||||
extern int git_load_dives(struct git_info *, struct divelog *log);
|
||||
extern int do_git_save(struct git_info *, bool select_only, bool create_empty);
|
||||
extern void cleanup_git_info(struct git_info *);
|
||||
extern bool git_local_only;
|
||||
extern bool git_remote_sync_successful;
|
||||
extern void clear_git_id(void);
|
||||
extern void set_git_id(const struct git_oid *);
|
||||
void set_git_update_cb(int(*)(const char *));
|
||||
int git_storage_update_progress(const char *text);
|
||||
char *get_local_dir(const char *, const char *);
|
||||
int git_create_local_repo(const char *filename);
|
||||
int get_authorship(git_repository *repo, git_signature **authorp);
|
||||
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
|
||||
#include <string>
|
||||
extern std::string saved_git_id;
|
||||
extern std::string get_sha(git_repository *repo, const char *branch);
|
||||
extern int git_create_local_repo(const std::string &filename);
|
||||
|
||||
#endif
|
||||
#endif // GITACCESS_H
|
||||
|
||||
@ -2,6 +2,7 @@
|
||||
#include "dive.h"
|
||||
#include "metrics.h"
|
||||
#include "divelist.h"
|
||||
#include "errorhelper.h"
|
||||
#include "qthelper.h"
|
||||
#include "imagedownloader.h"
|
||||
#include "videoframeextractor.h"
|
||||
@ -56,7 +57,7 @@ void ImageDownloader::saveImage(QNetworkReply *reply)
|
||||
hash.addData(filename.toUtf8());
|
||||
QFile imageFile(path.append("/").append(hash.result().toHex()));
|
||||
if (imageFile.open(QIODevice::WriteOnly)) {
|
||||
qDebug() << "Write image to" << imageFile.fileName();
|
||||
report_info("Write image to %s", qPrintable(imageFile.fileName()));
|
||||
QDataStream stream(&imageFile);
|
||||
stream.writeRawData(imageData.data(), imageData.length());
|
||||
imageFile.waitForBytesWritten(-1);
|
||||
|
||||
@ -8,6 +8,7 @@
|
||||
#include "ssrf.h"
|
||||
#include "dive.h"
|
||||
#include "divesite.h"
|
||||
#include "errorhelper.h"
|
||||
#include "gas.h"
|
||||
#include "parse.h"
|
||||
#include "sample.h"
|
||||
@ -147,35 +148,35 @@ static int cobalt_dive(void *param, int, char **data, char **)
|
||||
snprintf(get_buffer, sizeof(get_buffer) - 1, get_cylinder_template, state->cur_dive->number);
|
||||
retval = sqlite3_exec(handle, get_buffer, &cobalt_cylinders, state, NULL);
|
||||
if (retval != SQLITE_OK) {
|
||||
fprintf(stderr, "%s", "Database query cobalt_cylinders failed.\n");
|
||||
report_info("Database query cobalt_cylinders failed.");
|
||||
return 1;
|
||||
}
|
||||
|
||||
snprintf(get_buffer, sizeof(get_buffer) - 1, get_buddy_template, state->cur_dive->number);
|
||||
retval = sqlite3_exec(handle, get_buffer, &cobalt_buddies, state, NULL);
|
||||
if (retval != SQLITE_OK) {
|
||||
fprintf(stderr, "%s", "Database query cobalt_buddies failed.\n");
|
||||
report_info("Database query cobalt_buddies failed.");
|
||||
return 1;
|
||||
}
|
||||
|
||||
snprintf(get_buffer, sizeof(get_buffer) - 1, get_visibility_template, state->cur_dive->number);
|
||||
retval = sqlite3_exec(handle, get_buffer, &cobalt_visibility, state, NULL);
|
||||
if (retval != SQLITE_OK) {
|
||||
fprintf(stderr, "%s", "Database query cobalt_visibility failed.\n");
|
||||
report_info("Database query cobalt_visibility failed.");
|
||||
return 1;
|
||||
}
|
||||
|
||||
snprintf(get_buffer, sizeof(get_buffer) - 1, get_location_template, state->cur_dive->number);
|
||||
retval = sqlite3_exec(handle, get_buffer, &cobalt_location, &location, NULL);
|
||||
if (retval != SQLITE_OK) {
|
||||
fprintf(stderr, "%s", "Database query cobalt_location failed.\n");
|
||||
report_info("Database query cobalt_location failed.");
|
||||
return 1;
|
||||
}
|
||||
|
||||
snprintf(get_buffer, sizeof(get_buffer) - 1, get_site_template, state->cur_dive->number);
|
||||
retval = sqlite3_exec(handle, get_buffer, &cobalt_location, &location_site, NULL);
|
||||
if (retval != SQLITE_OK) {
|
||||
fprintf(stderr, "%s", "Database query cobalt_location (site) failed.\n");
|
||||
report_info("Database query cobalt_location (site) failed.");
|
||||
return 1;
|
||||
}
|
||||
|
||||
@ -196,7 +197,7 @@ static int cobalt_dive(void *param, int, char **data, char **)
|
||||
snprintf(get_buffer, sizeof(get_buffer) - 1, get_profile_template, state->cur_dive->number);
|
||||
retval = sqlite3_exec(handle, get_buffer, &cobalt_profile_sample, state, NULL);
|
||||
if (retval != SQLITE_OK) {
|
||||
fprintf(stderr, "%s", "Database query cobalt_profile_sample failed.\n");
|
||||
report_info("Database query cobalt_profile_sample failed.");
|
||||
return 1;
|
||||
}
|
||||
|
||||
@ -219,7 +220,7 @@ extern "C" int parse_cobalt_buffer(sqlite3 *handle, const char *url, const char
|
||||
retval = sqlite3_exec(handle, get_dives, &cobalt_dive, &state, NULL);
|
||||
|
||||
if (retval != SQLITE_OK) {
|
||||
fprintf(stderr, "Database query failed '%s'.\n", url);
|
||||
report_info("Database query failed '%s'.\n", url);
|
||||
return 1;
|
||||
}
|
||||
|
||||
|
||||
@ -10,6 +10,7 @@
|
||||
#include "divelist.h"
|
||||
#include "divelog.h"
|
||||
#include "file.h"
|
||||
#include "format.h"
|
||||
#include "parse.h"
|
||||
#include "sample.h"
|
||||
#include "divelist.h"
|
||||
@ -100,7 +101,7 @@ static char *parse_dan_new_line(char *buf, const char *NL)
|
||||
if (iter) {
|
||||
iter += strlen(NL);
|
||||
} else {
|
||||
fprintf(stderr, "DEBUG: No new line found\n");
|
||||
report_info("DEBUG: No new line found");
|
||||
return NULL;
|
||||
}
|
||||
return iter;
|
||||
@ -128,7 +129,7 @@ static int parse_dan_format(const char *filename, struct xml_params *params, str
|
||||
} else if ((ptr = strstr(mem.data(), "\n")) != NULL) {
|
||||
NL = "\n";
|
||||
} else {
|
||||
fprintf(stderr, "DEBUG: failed to detect NL\n");
|
||||
report_info("DEBUG: failed to detect NL");
|
||||
return -1;
|
||||
}
|
||||
|
||||
@ -144,7 +145,7 @@ static int parse_dan_format(const char *filename, struct xml_params *params, str
|
||||
xml_params_add(params, "diveNro", tmpbuf);
|
||||
}
|
||||
|
||||
//fprintf(stderr, "DEBUG: BEGIN end_ptr %d round %d <%s>\n", end_ptr, j++, ptr);
|
||||
//report_info("DEBUG: BEGIN end_ptr %d round %d <%s>", end_ptr, j++, ptr);
|
||||
iter = ptr + 1;
|
||||
for (i = 0; i <= 4 && iter; ++i) {
|
||||
iter = strchr(iter, '|');
|
||||
@ -153,7 +154,7 @@ static int parse_dan_format(const char *filename, struct xml_params *params, str
|
||||
}
|
||||
|
||||
if (!iter) {
|
||||
fprintf(stderr, "DEBUG: Data corrupt");
|
||||
report_info("DEBUG: Data corrupt");
|
||||
return -1;
|
||||
}
|
||||
|
||||
@ -214,7 +215,7 @@ static int parse_dan_format(const char *filename, struct xml_params *params, str
|
||||
|
||||
/* After ZDH we should get either ZDT (above) or ZDP */
|
||||
if (strncmp(iter, "ZDP{", 4) != 0) {
|
||||
fprintf(stderr, "DEBUG: Input appears to violate DL7 specification\n");
|
||||
report_info("DEBUG: Input appears to violate DL7 specification");
|
||||
end_ptr = iter - mem.data();
|
||||
continue;
|
||||
}
|
||||
@ -236,7 +237,7 @@ static int parse_dan_format(const char *filename, struct xml_params *params, str
|
||||
if (ptr) {
|
||||
*ptr = 0;
|
||||
} else {
|
||||
fprintf(stderr, "DEBUG: failed to find end ZDP\n");
|
||||
report_info("DEBUG: failed to find end ZDP");
|
||||
return -1;
|
||||
}
|
||||
mem_csv.resize(ptr - mem_csv.data());
|
||||
@ -313,10 +314,11 @@ extern "C" int parse_csv_file(const char *filename, struct xml_params *params, c
|
||||
|
||||
#ifndef SUBSURFACE_MOBILE
|
||||
if (verbose >= 2) {
|
||||
fprintf(stderr, "(echo '<csv>'; cat %s;echo '</csv>') | xsltproc ", filename);
|
||||
std::string info = format_string_std("(echo '<csv>'; cat %s;echo '</csv>') | xsltproc ", filename);
|
||||
for (int i = 0; i < xml_params_count(params); i++)
|
||||
fprintf(stderr, "--stringparam %s %s ", xml_params_get_key(params, i), xml_params_get_value(params, i));
|
||||
fprintf(stderr, "%s/xslt/%s -\n", SUBSURFACE_SOURCE, csvtemplate);
|
||||
info += format_string_std("--stringparam %s %s ", xml_params_get_key(params, i), xml_params_get_value(params, i));
|
||||
info += format_string_std("%s/xslt/%s -", SUBSURFACE_SOURCE, csvtemplate);
|
||||
report_info("%s", info.c_str());
|
||||
}
|
||||
#endif
|
||||
ret = parse_xml_buffer(filename, mem.data(), mem.size(), log, params);
|
||||
@ -385,8 +387,11 @@ static int try_to_xslt_open_csv(const char *filename, std::string &mem, const ch
|
||||
memcpy(ptr_out, tag, tag_name_size);
|
||||
*--ptr_out = '<';
|
||||
|
||||
// On Windows, ptrdiff_t is long long int, on Linux it is long int.
|
||||
// Windows doesn't support the ptrdiff_t format specifier "%td", so
|
||||
// let's cast to long int.
|
||||
if (ptr_out != mem.data())
|
||||
fprintf(stderr, "try_to_xslt_open_csv(): ptr_out off by %ld. This shouldn't happen\n", ptr_out - mem.data());
|
||||
report_info("try_to_xslt_open_csv(): ptr_out off by %ld. This shouldn't happen", static_cast<long int>(ptr_out - mem.data()));
|
||||
|
||||
return 0;
|
||||
}
|
||||
@ -719,7 +724,7 @@ int parse_txt_file(const char *filename, const char *csv, struct divelog *log)
|
||||
case EOF:
|
||||
break;
|
||||
default:
|
||||
printf("Unable to parse input: %s\n", lineptr);
|
||||
report_info("Unable to parse input: %s\n", lineptr);
|
||||
break;
|
||||
}
|
||||
|
||||
@ -886,10 +891,11 @@ static int parse_seabear_csv_file(const char *filename, struct xml_params *param
|
||||
*/
|
||||
|
||||
if (verbose >= 2) {
|
||||
fprintf(stderr, "xsltproc ");
|
||||
std::string info = "xsltproc ";
|
||||
for (i = 0; i < xml_params_count(params); i++)
|
||||
fprintf(stderr, "--stringparam %s %s ", xml_params_get_key(params, i), xml_params_get_value(params, i));
|
||||
fprintf(stderr, "xslt/csv2xml.xslt\n");
|
||||
info += format_string_std("--stringparam %s %s ", xml_params_get_key(params, i), xml_params_get_value(params, i));
|
||||
info += "xslt/csv2xml.xslt";
|
||||
report_info("%s", info.c_str());
|
||||
}
|
||||
|
||||
ret = parse_xml_buffer(filename, mem.data(), mem.size(), log, params);
|
||||
@ -926,10 +932,11 @@ int parse_manual_file(const char *filename, struct xml_params *params, struct di
|
||||
|
||||
#ifndef SUBSURFACE_MOBILE
|
||||
if (verbose >= 2) {
|
||||
fprintf(stderr, "(echo '<manualCSV>'; cat %s;echo '</manualCSV>') | xsltproc ", filename);
|
||||
std::string info = format_string_std("(echo '<manualCSV>'; cat %s;echo '</manualCSV>') | xsltproc ", filename);
|
||||
for (int i = 0; i < xml_params_count(params); i++)
|
||||
fprintf(stderr, "--stringparam %s %s ", xml_params_get_key(params, i), xml_params_get_value(params, i));
|
||||
fprintf(stderr, "%s/xslt/manualcsv2xml.xslt -\n", SUBSURFACE_SOURCE);
|
||||
info += format_string_std("--stringparam %s %s ", xml_params_get_key(params, i), xml_params_get_value(params, i));
|
||||
info += format_string_std("%s/xslt/manualcsv2xml.xslt -", SUBSURFACE_SOURCE);
|
||||
report_info("%s", info.c_str());
|
||||
}
|
||||
#endif
|
||||
ret = parse_xml_buffer(filename, mem.data(), mem.size(), log, params);
|
||||
|
||||
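Aside (not part of the diff): the hunks above and below repeat one pattern — direct fprintf(stderr, ...) and printf(...) debug output is routed through report_info() instead. As a rough sketch only (the real helper lives in Subsurface's errorhelper code; the name, buffer size and routing below are assumptions, not the project's actual implementation), such a printf-style logger can be as small as:

    #include <cstdarg>
    #include <cstdio>

    // Hypothetical stand-in for report_info(); illustration only.
    static void report_info_sketch(const char *fmt, ...)
    {
        char buf[1024];
        va_list ap;
        va_start(ap, fmt);
        vsnprintf(buf, sizeof(buf), fmt, ap);  // format once into a buffer
        va_end(ap);
        fprintf(stderr, "INFO: %s\n", buf);    // a real logger could also append to the in-app log
    }

The advantage over raw fprintf() is a single choke point: every message can also be captured in the application log on platforms (such as mobile) where stderr is not visible.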
@@ -13,6 +13,7 @@
#include "divelist.h"
#include "divelog.h"
#include "device.h"
#include "errorhelper.h"
#include "membuffer.h"
#include "gettext.h"

@@ -336,14 +337,14 @@ static int divinglog_dive(void *param, int, char **data, char **)
snprintf(get_buffer, sizeof(get_buffer) - 1, get_cylinder0_template, diveid);
retval = sqlite3_exec(handle, get_buffer, &divinglog_cylinder, state, NULL);
if (retval != SQLITE_OK) {
fprintf(stderr, "%s", "Database query divinglog_cylinder0 failed.\n");
report_info("Database query divinglog_cylinder0 failed.");
return 1;
}

snprintf(get_buffer, sizeof(get_buffer) - 1, get_cylinder_template, diveid);
retval = sqlite3_exec(handle, get_buffer, &divinglog_cylinder, state, NULL);
if (retval != SQLITE_OK) {
fprintf(stderr, "%s", "Database query divinglog_cylinder failed.\n");
report_info("Database query divinglog_cylinder failed.");
return 1;
}

@@ -373,7 +374,7 @@ static int divinglog_dive(void *param, int, char **data, char **)
snprintf(get_buffer, sizeof(get_buffer) - 1, get_profile_template, diveid);
retval = sqlite3_exec(handle, get_buffer, &divinglog_profile, state, NULL);
if (retval != SQLITE_OK) {
fprintf(stderr, "%s", "Database query divinglog_profile failed.\n");
report_info("Database query divinglog_profile failed.");
return 1;
}

@@ -396,7 +397,7 @@ extern "C" int parse_divinglog_buffer(sqlite3 *handle, const char *url, const ch
retval = sqlite3_exec(handle, get_dives, &divinglog_dive, &state, NULL);

if (retval != SQLITE_OK) {
fprintf(stderr, "Database query failed '%s'.\n", url);
report_info("Database query failed '%s'.", url);
return 1;
}


@@ -143,7 +143,7 @@ static int seac_dive(void *param, int, char **data, char **)
break;
default:
if (verbose) {
fprintf(stderr, "Unknown divetype %i", atoi(data[6]));
report_info("Unknown divetype %i", atoi(data[6]));
}
}
}
@@ -174,7 +174,7 @@ static int seac_dive(void *param, int, char **data, char **)
break;
default:
if (verbose) {
fprintf(stderr, "Unknown salinity %i", atoi(data[8]));
report_info("Unknown salinity %i", atoi(data[8]));
}
}
}
@@ -187,7 +187,7 @@ static int seac_dive(void *param, int, char **data, char **)
// Create sql_stmt type to query DB
retval = sqlite3_prepare_v2(handle, get_samples, -1, &sqlstmt, 0);
if (retval != SQLITE_OK) {
fprintf(stderr, "%s", "Preparing SQL object failed when getting SeacSync dives.\n");
report_info("Preparing SQL object failed when getting SeacSync dives.");
return 1;
}

@@ -198,7 +198,7 @@ static int seac_dive(void *param, int, char **data, char **)
// Catch a bad query
retval = sqlite3_step(sqlstmt);
if (retval == SQLITE_ERROR) {
fprintf(stderr, "%s", "Getting dive data from SeacSync DB failed.\n");
report_info("Getting dive data from SeacSync DB failed.");
return 1;
}

@@ -293,7 +293,7 @@ extern "C" int parse_seac_buffer(sqlite3 *handle, const char *url, const char *,
retval = sqlite3_exec(handle, get_dives, &seac_dive, &state, &err);

if (retval != SQLITE_OK) {
fprintf(stderr, "Database query failed '%s'.\n", url);
report_info("Database query failed '%s'.", url);
return 1;
}


@@ -12,6 +12,7 @@
#include "divelist.h"
#include "divelog.h"
#include "device.h"
#include "errorhelper.h"
#include "membuffer.h"
#include "gettext.h"

@@ -300,7 +301,7 @@ static int shearwater_dive(void *param, int, char **data, char **)
snprintf(get_buffer, sizeof(get_buffer) - 1, get_mode_template, dive_id);
retval = sqlite3_exec(handle, get_buffer, &shearwater_mode, state, NULL);
if (retval != SQLITE_OK) {
fprintf(stderr, "%s", "Database query shearwater_mode failed.\n");
report_info("Database query shearwater_mode failed.");
return 1;
}
}
@@ -308,14 +309,14 @@ static int shearwater_dive(void *param, int, char **data, char **)
snprintf(get_buffer, sizeof(get_buffer) - 1, get_cylinder_template, dive_id);
retval = sqlite3_exec(handle, get_buffer, &shearwater_cylinders, state, NULL);
if (retval != SQLITE_OK) {
fprintf(stderr, "%s", "Database query shearwater_cylinders failed.\n");
report_info("Database query shearwater_cylinders failed.");
return 1;
}

snprintf(get_buffer, sizeof(get_buffer) - 1, get_changes_template, dive_id);
retval = sqlite3_exec(handle, get_buffer, &shearwater_changes, state, NULL);
if (retval != SQLITE_OK) {
fprintf(stderr, "%s", "Database query shearwater_changes failed.\n");
report_info("Database query shearwater_changes failed.");
return 1;
}

@@ -325,7 +326,7 @@ static int shearwater_dive(void *param, int, char **data, char **)
snprintf(get_buffer, sizeof(get_buffer) - 1, get_profile_template, dive_id);
retval = sqlite3_exec(handle, get_buffer, &shearwater_profile_sample, state, NULL);
if (retval != SQLITE_OK) {
fprintf(stderr, "%s", "Database query shearwater_profile_sample failed.\n");
report_info("Database query shearwater_profile_sample failed.");
return 1;
}
}
@@ -430,7 +431,7 @@ static int shearwater_cloud_dive(void *param, int, char **data, char **)
snprintf(get_buffer, sizeof(get_buffer) - 1, get_mode_template, dive_id);
retval = sqlite3_exec(handle, get_buffer, &shearwater_mode, state, NULL);
if (retval != SQLITE_OK) {
fprintf(stderr, "%s", "Database query shearwater_mode failed.\n");
report_info("Database query shearwater_mode failed.");
return 1;
}
}
@@ -438,21 +439,21 @@ static int shearwater_cloud_dive(void *param, int, char **data, char **)
snprintf(get_buffer, sizeof(get_buffer) - 1, get_cylinder_template, dive_id);
retval = sqlite3_exec(handle, get_buffer, &shearwater_cylinders, state, NULL);
if (retval != SQLITE_OK) {
fprintf(stderr, "%s", "Database query shearwater_cylinders failed.\n");
report_info("Database query shearwater_cylinders failed.");
return 1;
}

snprintf(get_buffer, sizeof(get_buffer) - 1, get_first_gas_template, dive_id);
retval = sqlite3_exec(handle, get_buffer, &shearwater_changes, state, NULL);
if (retval != SQLITE_OK) {
fprintf(stderr, "%s", "Database query shearwater_changes failed.\n");
report_info("Database query shearwater_changes failed.");
return 1;
}

snprintf(get_buffer, sizeof(get_buffer) - 1, get_changes_template, dive_id);
retval = sqlite3_exec(handle, get_buffer, &shearwater_changes, state, NULL);
if (retval != SQLITE_OK) {
fprintf(stderr, "%s", "Database query shearwater_changes failed.\n");
report_info("Database query shearwater_changes failed.");
return 1;
}

@@ -462,7 +463,7 @@ static int shearwater_cloud_dive(void *param, int, char **data, char **)
snprintf(get_buffer, sizeof(get_buffer) - 1, get_profile_template, dive_id, dive_id);
retval = sqlite3_exec(handle, get_buffer, &shearwater_profile_sample, state, NULL);
if (retval != SQLITE_OK) {
fprintf(stderr, "%s", "Database query shearwater_profile_sample failed.\n");
report_info("Database query shearwater_profile_sample failed.");
return 1;
}
}
@@ -488,7 +489,7 @@ extern "C" int parse_shearwater_buffer(sqlite3 *handle, const char *url, const c
retval = sqlite3_exec(handle, get_dives, &shearwater_dive, &state, NULL);

if (retval != SQLITE_OK) {
fprintf(stderr, "Database query failed '%s'.\n", url);
report_info("Database query failed '%s'.", url);
return 1;
}

@@ -508,7 +509,7 @@ extern "C" int parse_shearwater_cloud_buffer(sqlite3 *handle, const char *url, c
retval = sqlite3_exec(handle, get_dives, &shearwater_cloud_dive, &state, NULL);

if (retval != SQLITE_OK) {
fprintf(stderr, "Database query failed '%s'.\n", url);
report_info("Database query failed '%s'.", url);
return 1;
}


@@ -12,6 +12,7 @@
#include "divelist.h"
#include "divelog.h"
#include "device.h"
#include "errorhelper.h"
#include "membuffer.h"
#include "gettext.h"
#include "tag.h"
@@ -260,30 +261,19 @@ static int dm4_dive(void *param, int, char **data, char **)
snprintf(get_events, sizeof(get_events) - 1, get_events_template, state->cur_dive->number);
retval = sqlite3_exec(handle, get_events, &dm4_events, state, NULL);
if (retval != SQLITE_OK) {
fprintf(stderr, "%s", "Database query dm4_events failed.\n");
report_info("Database query dm4_events failed.");
return 1;
}

snprintf(get_events, sizeof(get_events) - 1, get_tags_template, state->cur_dive->number);
retval = sqlite3_exec(handle, get_events, &dm4_tags, state, NULL);
if (retval != SQLITE_OK) {
fprintf(stderr, "%s", "Database query dm4_tags failed.\n");
report_info("Database query dm4_tags failed.");
return 1;
}

dive_end(state);

/*
for (i=0; i<columns;++i) {
fprintf(stderr, "%s\t", column[i]);
}
fprintf(stderr, "\n");
for (i=0; i<columns;++i) {
fprintf(stderr, "%s\t", data[i]);
}
fprintf(stderr, "\n");
//exit(0);
*/
return SQLITE_OK;
}

@@ -303,7 +293,7 @@ extern "C" int parse_dm4_buffer(sqlite3 *handle, const char *url, const char *,
retval = sqlite3_exec(handle, get_dives, &dm4_dive, &state, &err);

if (retval != SQLITE_OK) {
fprintf(stderr, "Database query failed '%s'.\n", url);
report_info("Database query failed '%s'.", url);
return 1;
}

@@ -430,7 +420,7 @@ static int dm5_dive(void *param, int, char **data, char **)
snprintf(get_events, sizeof(get_events) - 1, get_cylinders_template, state->cur_dive->number);
retval = sqlite3_exec(handle, get_events, &dm5_cylinders, state, NULL);
if (retval != SQLITE_OK) {
fprintf(stderr, "%s", "Database query dm5_cylinders failed.\n");
report_info("Database query dm5_cylinders failed.");
return 1;
}

@@ -536,21 +526,21 @@ static int dm5_dive(void *param, int, char **data, char **)
snprintf(get_events, sizeof(get_events) - 1, get_gaschange_template, state->cur_dive->number);
retval = sqlite3_exec(handle, get_events, &dm5_gaschange, state, NULL);
if (retval != SQLITE_OK) {
fprintf(stderr, "%s", "Database query dm5_gaschange failed.\n");
report_info("Database query dm5_gaschange failed.");
return 1;
}

snprintf(get_events, sizeof(get_events) - 1, get_events_template, state->cur_dive->number);
retval = sqlite3_exec(handle, get_events, &dm4_events, state, NULL);
if (retval != SQLITE_OK) {
fprintf(stderr, "%s", "Database query dm4_events failed.\n");
report_info("Database query dm4_events failed.");
return 1;
}

snprintf(get_events, sizeof(get_events) - 1, get_tags_template, state->cur_dive->number);
retval = sqlite3_exec(handle, get_events, &dm4_tags, state, NULL);
if (retval != SQLITE_OK) {
fprintf(stderr, "%s", "Database query dm4_tags failed.\n");
report_info("Database query dm4_tags failed.");
return 1;
}

@@ -575,7 +565,7 @@ extern "C" int parse_dm5_buffer(sqlite3 *handle, const char *url, const char *,
retval = sqlite3_exec(handle, get_dives, &dm5_dive, &state, &err);

if (retval != SQLITE_OK) {
fprintf(stderr, "Database query failed '%s'.\n", url);
report_info("Database query failed '%s'.", url);
return 1;
}


@@ -19,6 +19,7 @@
#include "sample.h"
#include "subsurface-float.h"
#include "subsurface-string.h"
#include "format.h"
#include "device.h"
#include "dive.h"
#include "errorhelper.h"
@@ -50,15 +51,8 @@ static int stoptime, stopdepth, ndl, po2, cns, heartbeat, bearing;
static bool in_deco, first_temp_is_air;
static int current_gas_index;

/* logging bits from libdivecomputer */
#ifndef __ANDROID__
#define INFO(context, fmt, ...) fprintf(stderr, "INFO: " fmt "\n", ##__VA_ARGS__)
#define ERROR(context, fmt, ...) fprintf(stderr, "ERROR: " fmt "\n", ##__VA_ARGS__)
#else
#include <android/log.h>
#define INFO(context, fmt, ...) __android_log_print(ANDROID_LOG_DEBUG, __FILE__, "INFO: " fmt "\n", ##__VA_ARGS__)
#define ERROR(context, fmt, ...) __android_log_print(ANDROID_LOG_DEBUG, __FILE__, "ERROR: " fmt "\n", ##__VA_ARGS__)
#endif
#define INFO(fmt, ...) report_info("INFO: " fmt, ##__VA_ARGS__)
#define ERROR(fmt, ...) report_info("ERROR: " fmt, ##__VA_ARGS__)

/*
* Directly taken from libdivecomputer's examples/common.c to improve
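Aside (not part of the diff): the hunk above collapses the platform-specific logging macros into thin wrappers around report_info(), dropping the unused context parameter. The "INFO: " fmt form relies on adjacent string-literal concatenation, so, assuming a printf-style report_info() as used throughout these hunks, a call expands as follows:

    // Illustration of the macro expansion above; report_info() itself is assumed
    // to be a printf-style variadic function declared in errorhelper.h.
    #define INFO(fmt, ...) report_info("INFO: " fmt, ##__VA_ARGS__)

    INFO("dc_device_open error value of %d", rc);
    // expands to:
    // report_info("INFO: dc_device_open error value of %d", rc);

The ##__VA_ARGS__ extension (supported by GCC, Clang and MSVC) swallows the trailing comma when the macro is invoked with no variadic arguments.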
@@ -142,7 +136,7 @@ static dc_status_t parse_gasmixes(device_data_t *devdata, struct dive *dive, dc_
{
static bool shown_warning = false;
unsigned int i;
int rc;
dc_status_t rc;

unsigned int ntanks = 0;
rc = dc_parser_get_field(parser, DC_FIELD_TANK_COUNT, 0, &ntanks);
@@ -162,7 +156,7 @@ static dc_status_t parse_gasmixes(device_data_t *devdata, struct dive *dive, dc_
}

clear_cylinder_table(&dive->cylinders);
for (i = 0; i < MAX(ngases, ntanks); i++) {
for (i = 0; i < std::max(ngases, ntanks); i++) {
cylinder_t cyl = empty_cylinder;
cyl.cylinder_use = NOT_USED;

@@ -447,7 +441,7 @@ sample_cb(dc_sample_type_t type, const dc_sample_value_t *pvalue, void *userdata
break;
#ifdef DEBUG_DC_VENDOR
case DC_SAMPLE_VENDOR:
printf(" <vendor time='%u:%02u' type=\"%u\" size=\"%u\">", FRACTION(sample->time.seconds, 60),
printf(" <vendor time='%u:%02u' type=\"%u\" size=\"%u\">", FRACTION_TUPLE(sample->time.seconds, 60),
value.vendor.type, value.vendor.size);
for (int i = 0; i < value.vendor.size; ++i)
printf("%02X", ((unsigned char *)value.vendor.data)[i]);
@@ -503,7 +497,7 @@ static void dev_info(device_data_t *, const char *fmt, ...)
va_end(ap);
progress_bar_text = buffer;
if (verbose)
INFO(0, "dev_info: %s\n", buffer);
INFO("dev_info: %s", buffer);

if (progress_callback)
(*progress_callback)(buffer);
@@ -522,7 +516,7 @@ static void download_error(const char *fmt, ...)
report_error("Dive %d: %s", import_dive_number, buffer);
}

static int parse_samples(device_data_t *, struct divecomputer *dc, dc_parser_t *parser)
static dc_status_t parse_samples(device_data_t *, struct divecomputer *dc, dc_parser_t *parser)
{
// Parse the sample data.
return dc_parser_samples_foreach(parser, sample_cb, dc);
@@ -821,7 +815,7 @@ static int dive_cb(const unsigned char *data, unsigned int size,
const unsigned char *fingerprint, unsigned int fsize,
void *userdata)
{
int rc;
dc_status_t rc;
dc_parser_t *parser = NULL;
device_data_t *devdata = (device_data_t *)userdata;
struct dive *dive = NULL;
@@ -836,7 +830,7 @@ static int dive_cb(const unsigned char *data, unsigned int size,

rc = dc_parser_new(&parser, devdata->device, data, size);
if (rc != DC_STATUS_SUCCESS) {
download_error(translate("gettextFromC", "Unable to create parser for %s %s"), devdata->vendor, devdata->product);
download_error(translate("gettextFromC", "Unable to create parser for %s %s: %d"), devdata->vendor, devdata->product, errmsg(rc));
return true;
}

@@ -849,14 +843,14 @@ static int dive_cb(const unsigned char *data, unsigned int size,
// Parse the dive's header data
rc = libdc_header_parser (parser, devdata, dive);
if (rc != DC_STATUS_SUCCESS) {
download_error(translate("getextFromC", "Error parsing the header"));
download_error(translate("getextFromC", "Error parsing the header: %s"), errmsg(rc));
goto error_exit;
}

// Initialize the sample data.
rc = parse_samples(devdata, &dive->dc, parser);
if (rc != DC_STATUS_SUCCESS) {
download_error(translate("gettextFromC", "Error parsing the samples"));
download_error(translate("gettextFromC", "Error parsing the samples: %s"), errmsg(rc));
goto error_exit;
}

@@ -1098,14 +1092,14 @@ static void event_cb(dc_device_t *device, dc_event_type_t event, const void *dat
if (dc_descriptor_get_model(devdata->descriptor) != devinfo->model) {
dc_descriptor_t *better_descriptor = get_descriptor(dc_descriptor_get_type(devdata->descriptor), devinfo->model);
if (better_descriptor != NULL) {
fprintf(stderr, "EVENT_DEVINFO gave us a different detected product (model %d instead of %d), which we are using now.\n",
report_info("EVENT_DEVINFO gave us a different detected product (model %d instead of %d), which we are using now.",
devinfo->model, dc_descriptor_get_model(devdata->descriptor));
devdata->descriptor = better_descriptor;
devdata->product = dc_descriptor_get_product(better_descriptor);
devdata->vendor = dc_descriptor_get_vendor(better_descriptor);
devdata->model = str_printf("%s %s", devdata->vendor, devdata->product);
} else {
fprintf(stderr, "EVENT_DEVINFO gave us a different detected product (model %d instead of %d), but that one is unknown.\n",
report_info("EVENT_DEVINFO gave us a different detected product (model %d instead of %d), but that one is unknown.",
devinfo->model, dc_descriptor_get_model(devdata->descriptor));
}
}
@@ -1160,13 +1154,19 @@ static const char *do_device_import(device_data_t *data)
// Register the event handler.
int events = DC_EVENT_WAITING | DC_EVENT_PROGRESS | DC_EVENT_DEVINFO | DC_EVENT_CLOCK | DC_EVENT_VENDOR;
rc = dc_device_set_events(device, events, event_cb, data);
if (rc != DC_STATUS_SUCCESS)
if (rc != DC_STATUS_SUCCESS) {
dev_info(data, "Import error: %s", errmsg(rc));

return translate("gettextFromC", "Error registering the event handler.");
}

// Register the cancellation handler.
rc = dc_device_set_cancel(device, cancel_cb, data);
if (rc != DC_STATUS_SUCCESS)
if (rc != DC_STATUS_SUCCESS) {
dev_info(data, "Import error: %s", errmsg(rc));

return translate("gettextFromC", "Error registering the cancellation handler.");
}

if (data->libdc_dump) {
dc_buffer_t *buffer = dc_buffer_new(0);
@@ -1188,6 +1188,8 @@ static const char *do_device_import(device_data_t *data)
if (rc == DC_STATUS_UNSUPPORTED)
return translate("gettextFromC", "Dumping not supported on this device");

dev_info(data, "Import error: %s", errmsg(rc));

return translate("gettextFromC", "Dive data dumping error");
}
} else {
@@ -1196,6 +1198,8 @@ static const char *do_device_import(device_data_t *data)
if (rc != DC_STATUS_SUCCESS) {
progress_bar_fraction = 0.0;

dev_info(data, "Import error: %s", errmsg(rc));

return translate("gettextFromC", "Dive data import error");
}
}
@@ -1287,7 +1291,7 @@ static dc_status_t usbhid_device_open(dc_iostream_t **iostream, dc_context_t *co
dc_iterator_free (iterator);

if (!device) {
ERROR(context, "didn't find HID device\n");
ERROR("didn't find HID device");
return DC_STATUS_NODEVICE;
}
dev_info(data, "Opening USB HID device for %04x:%04x",
@@ -1362,7 +1366,7 @@ static dc_status_t bluetooth_device_open(dc_context_t *context, device_data_t *d
dc_iterator_free (iterator);

if (!address) {
report_error("No rfcomm device found");
dev_info(data, "No rfcomm device found");
return DC_STATUS_NODEVICE;
}

@@ -1382,7 +1386,7 @@ dc_status_t divecomputer_device_open(device_data_t *data)

transports &= supported;
if (!transports) {
report_error("Dive computer transport not supported");
dev_info(data, "Dive computer transport not supported");
return DC_STATUS_UNSUPPORTED;
}

@@ -1499,18 +1503,19 @@ const char *do_libdivecomputer_import(device_data_t *data)
rc = divecomputer_device_open(data);

if (rc != DC_STATUS_SUCCESS) {
report_error("%s", errmsg(rc));
dev_info(data, "Import error: %s", errmsg(rc));
} else {
dev_info(data, "Connecting ...");
rc = dc_device_open(&data->device, data->context, data->descriptor, data->iostream);
INFO(0, "dc_device_open error value of %d", rc);
if (rc != DC_STATUS_SUCCESS && subsurface_access(data->devname, R_OK | W_OK) != 0)
if (rc != DC_STATUS_SUCCESS) {
INFO("dc_device_open error value of %d", rc);
if (subsurface_access(data->devname, R_OK | W_OK) != 0)
#if defined(SUBSURFACE_MOBILE)
err = translate("gettextFromC", "Error opening the device %s %s (%s).\nIn most cases, in order to debug this issue, it is useful to send the developers the log files. You can copy them to the clipboard in the About dialog.");
err = translate("gettextFromC", "Error opening the device %s %s (%s).\nIn most cases, in order to debug this issue, it is useful to send the developers the log files. You can copy them to the clipboard in the About dialog.");
#else
err = translate("gettextFromC", "Error opening the device %s %s (%s).\nIn most cases, in order to debug this issue, a libdivecomputer logfile will be useful.\nYou can create this logfile by selecting the corresponding checkbox in the download dialog.");
err = translate("gettextFromC", "Error opening the device %s %s (%s).\nIn most cases, in order to debug this issue, a libdivecomputer logfile will be useful.\nYou can create this logfile by selecting the corresponding checkbox in the download dialog.");
#endif
if (rc == DC_STATUS_SUCCESS) {
} else {
dev_info(data, "Starting import ...");
err = do_device_import(data);
/* TODO: Show the logfile to the user on error. */
@@ -1611,12 +1616,12 @@ dc_status_t libdc_buffer_parser(struct dive *dive, device_data_t *data, unsigned
if (dc_descriptor_get_type(data->descriptor) != DC_FAMILY_UWATEC_ALADIN && dc_descriptor_get_type(data->descriptor) != DC_FAMILY_UWATEC_MEMOMOUSE) {
rc = libdc_header_parser (parser, data, dive);
if (rc != DC_STATUS_SUCCESS) {
report_error("Error parsing the dive header data. Dive # %d\nStatus = %s", dive->number, errmsg(rc));
report_error("Error parsing the dive header data. Dive # %d: %s", dive->number, errmsg(rc));
}
}
rc = dc_parser_samples_foreach (parser, sample_cb, &dive->dc);
if (rc != DC_STATUS_SUCCESS) {
report_error("Error parsing the sample data. Dive # %d\nStatus = %s", dive->number, errmsg(rc));
report_error("Error parsing the sample data. Dive # %d: %s", dive->number, errmsg(rc));
dc_parser_destroy (parser);
return rc;
}
@@ -1637,7 +1642,7 @@ dc_descriptor_t *get_descriptor(dc_family_t type, unsigned int model)

rc = dc_descriptor_iterator(&iterator);
if (rc != DC_STATUS_SUCCESS) {
fprintf(stderr, "Error creating the device descriptor iterator.\n");
report_info("Error creating the device descriptor iterator: %s", errmsg(rc));
return NULL;
}
while ((dc_iterator_next(iterator, &descriptor)) == DC_STATUS_SUCCESS) {

@@ -6,9 +6,10 @@
#include "divesite.h"
#include "dive.h"
#include "divelog.h"
#include "errorhelper.h"
#include "subsurface-string.h"
#include "file.h"
#include "sample.h"
#include "strndup.h"

// Convert bytes into an INT
#define array_uint16_le(p) ((unsigned int) (p)[0] \
@@ -174,28 +175,23 @@ static void parse_dives(int log_version, const unsigned char *buf, unsigned int

// Dive location, assemble Location and Place
unsigned int len, place_len;
char *location;
std::string location;
len = array_uint32_le(buf + ptr);
ptr += 4;
place_len = array_uint32_le(buf + ptr + len);

if (len && place_len) {
location = (char *)malloc(len + place_len + 4);
memset(location, 0, len + place_len + 4);
memcpy(location, buf + ptr, len);
memcpy(location + len, ", ", 2);
memcpy(location + len + 2, buf + ptr + len + 4, place_len);
location = std::string((char *)buf + ptr, len) + ", " +
std::string((char *)buf + ptr + len + 4, place_len);
} else if (len) {
location = strndup((char *)buf + ptr, len);
location = std::string((char *)buf + ptr, len);
} else if (place_len) {
location = strndup((char *)buf + ptr + len + 4, place_len);
location = std::string((char *)buf + ptr + len + 4, place_len);
}

/* Store the location only if we have one */
if (len || place_len) {
add_dive_to_dive_site(dive, find_or_create_dive_site_with_name(location, sites));
free(location);
}
if (!location.empty())
add_dive_to_dive_site(dive, find_or_create_dive_site_with_name(location.c_str(), sites));

ptr += len + 4 + place_len;

@@ -204,9 +200,9 @@ static void parse_dives(int log_version, const unsigned char *buf, unsigned int
ptr += 4;

// Blank notes are better than the default text
if (len && strncmp((char *)buf + ptr, "Comment ...", 11)) {
dive->notes = strndup((char *)buf + ptr, len);
}
std::string notes((char *)buf + ptr, len);
if (!starts_with(notes, "Comment ..."))
dive->notes = strdup(notes.c_str());
ptr += len;

dive->id = array_uint32_le(buf + ptr);
@@ -279,11 +275,11 @@ static void parse_dives(int log_version, const unsigned char *buf, unsigned int
}

if (sample_count == 0) {
fprintf(stderr, "DEBUG: sample count 0 - terminating parser\n");
report_info("DEBUG: sample count 0 - terminating parser");
break;
}
if (ptr + sample_count * 4 + 4 > buf_size) {
fprintf(stderr, "DEBUG: BOF - terminating parser\n");
report_info("DEBUG: BOF - terminating parser");
break;
}
// we aren't using the start_cns, dive_mode, and algorithm, yet

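Aside (not part of the diff): the location-assembly hunk above swaps a manual malloc/memset/memcpy/strndup sequence for std::string construction, which removes the explicit free() and the off-by-one risks. A minimal sketch of just that construction, assuming buf, ptr, len and place_len have the same meaning as in the hunk (a hypothetical helper name, not Subsurface code):

    #include <string>

    static std::string assemble_location(const unsigned char *buf, unsigned int ptr,
                                         unsigned int len, unsigned int place_len)
    {
        std::string location;
        if (len && place_len)
            location = std::string((const char *)buf + ptr, len) + ", " +
                       std::string((const char *)buf + ptr + len + 4, place_len);
        else if (len)
            location = std::string((const char *)buf + ptr, len);
        else if (place_len)
            location = std::string((const char *)buf + ptr + len + 4, place_len);
        return location;  // empty string means "no location stored"
    }

Returning an empty string also replaces the old "if (len || place_len)" guard: the caller only registers a dive site when !location.empty(), exactly as the new code in the hunk does.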
@@ -23,6 +23,7 @@
#include "errorhelper.h"
#include "sample.h"
#include "subsurface-string.h"
#include "format.h"
#include "trip.h"
#include "device.h"
#include "git-access.h"
@@ -1895,11 +1896,11 @@ static int do_git_load(git_repository *repo, const char *branch, struct git_pars
return ret;
}

std::string get_sha(git_repository *repo, const char *branch)
std::string get_sha(git_repository *repo, const std::string &branch)
{
char git_id_buffer[GIT_OID_HEXSZ + 1];
git_commit *commit;
if (find_commit(repo, branch, &commit))
if (find_commit(repo, branch.c_str(), &commit))
return std::string();
git_oid_tostr(git_id_buffer, sizeof(git_id_buffer), (const git_oid *)commit);
return std::string(git_id_buffer);
@@ -1913,7 +1914,7 @@ std::string get_sha(git_repository *repo, const char *branch)
* If it is a git repository, we return zero for success,
* or report an error and return 1 if the load failed.
*/
extern "C" int git_load_dives(struct git_info *info, struct divelog *log)
int git_load_dives(struct git_info *info, struct divelog *log)
{
int ret;
struct git_parser_state state;
@@ -1921,8 +1922,8 @@ extern "C" int git_load_dives(struct git_info *info, struct divelog *log)
state.log = log;

if (!info->repo)
return report_error("Unable to open git repository '%s[%s]'", info->url, info->branch);
ret = do_git_load(info->repo, info->branch, &state);
return report_error("Unable to open git repository '%s[%s]'", info->url.c_str(), info->branch.c_str());
ret = do_git_load(info->repo, info->branch.c_str(), &state);
finish_active_dive(&state);
finish_active_trip(&state);
return ret;

@@ -169,15 +169,6 @@ void put_format(struct membuffer *b, const char *fmt, ...)
va_end(args);
}

void put_format_loc(struct membuffer *b, const char *fmt, ...)
{
va_list args;

va_start(args, fmt);
put_vformat_loc(b, fmt, args);
va_end(args);
}

void put_milli(struct membuffer *b, const char *pre, int value, const char *post)
{
int i;
@@ -219,7 +210,7 @@ void put_depth(struct membuffer *b, depth_t depth, const char *pre, const char *
void put_duration(struct membuffer *b, duration_t duration, const char *pre, const char *post)
{
if (duration.seconds)
put_format(b, "%s%u:%02u%s", pre, FRACTION(duration.seconds, 60), post);
put_format(b, "%s%u:%02u%s", pre, FRACTION_TUPLE(duration.seconds, 60), post);
}

void put_pressure(struct membuffer *b, pressure_t pressure, const char *pre, const char *post)
@@ -243,7 +234,7 @@ void put_degrees(struct membuffer *b, degrees_t value, const char *pre, const ch
udeg = -udeg;
sign = "-";
}
put_format(b, "%s%s%u.%06u%s", pre, sign, FRACTION(udeg, 1000000), post);
put_format(b, "%s%s%u.%06u%s", pre, sign, FRACTION_TUPLE(udeg, 1000000), post);
}

void put_location(struct membuffer *b, const location_t *loc, const char *pre, const char *post)
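Aside (not part of the diff): the hunks above and several others in this comparison rename FRACTION() to FRACTION_TUPLE(). The exact definition is not shown in this diff, but a macro matching the "%u:%02u" and "%u.%06u" usages would have to expand to two comma-separated printf arguments (quotient and remainder), along these lines (an assumption, not the project's verbatim definition):

    // Hedged sketch of a FRACTION_TUPLE-style macro: expands to TWO arguments.
    #define FRACTION_TUPLE(n, x) ((unsigned)(n) / (x)), ((unsigned)(n) % (x))

    // e.g. the put_duration() call above becomes, after expansion:
    // put_format(b, "%s%u:%02u%s", pre,
    //            (unsigned)(duration.seconds) / 60, (unsigned)(duration.seconds) % 60, post);

The "_TUPLE" suffix makes it obvious at the call site that one macro invocation feeds two format specifiers.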
@@ -309,12 +300,10 @@ void put_quoted(struct membuffer *b, const char *text, int is_attribute, int is_
char *add_to_string_va(char *old, const char *fmt, va_list args)
{
char *res;
struct membuffer o = { 0 }, n = { 0 };
struct membufferpp o, n;
put_vformat(&n, fmt, args);
put_format(&o, "%s\n%s", old ?: "", mb_cstring(&n));
res = strdup(mb_cstring(&o));
free_buffer(&o);
free_buffer(&n);
free((void *)old);
return res;
}

@@ -75,9 +75,7 @@ extern void strip_mb(struct membuffer *);
/* The pointer obtained by mb_cstring is invalidated by any modifictation to the membuffer! */
extern const char *mb_cstring(struct membuffer *);
extern __printf(2, 0) void put_vformat(struct membuffer *, const char *, va_list);
extern __printf(2, 0) void put_vformat_loc(struct membuffer *, const char *, va_list);
extern __printf(2, 3) void put_format(struct membuffer *, const char *fmt, ...);
extern __printf(2, 3) void put_format_loc(struct membuffer *, const char *fmt, ...);
extern __printf(2, 0) char *add_to_string_va(char *old, const char *fmt, va_list args);
extern __printf(2, 3) char *add_to_string(char *old, const char *fmt, ...);


@@ -1,5 +1,6 @@
// SPDX-License-Identifier: GPL-2.0
#include "core/parse-gpx.h"
#include "core/errorhelper.h"
#include "core/subsurface-time.h"
#include "core/namecmp.h"
#include <QFile>
@@ -25,7 +26,7 @@ int getCoordsFromGPXFile(struct dive_coords *coords, const QString &fileName)
if (!gpxFile.open(QIODevice::ReadOnly | QIODevice::Text)) {
QByteArray local8bitBAString1 = fileName.toLocal8Bit();
char *fname = local8bitBAString1.data(); // convert QString to a C string fileName
fprintf(stderr, "GPS file open error: file name = %s\n", fname);
report_info("GPS file open error: file name = %s", fname);
return 1;
}

@@ -76,7 +77,7 @@ int getCoordsFromGPXFile(struct dive_coords *coords, const QString &fileName)

#ifdef GPSDEBUG
utc_mkdate(trkpt_time, &time); // print coordinates and time of each trkpt element of the GPX file as well as dive start time
fprintf(stderr, " %02d: lat=%f lon=%f timestamp=%ld (%ld) %02d/%02d/%02d %02d:%02d dt=%ld %02d/%02d/%02d %02d:%02d\n", line, lat,
report_info(" %02d: lat=%f lon=%f timestamp=%ld (%ld) %02d/%02d/%02d %02d:%02d dt=%ld %02d/%02d/%02d %02d:%02d", line, lat,
lon, trkpt_time, time_offset, time.tm_year, time.tm_mon+1, time.tm_mday, time.tm_hour, time.tm_min, divetime, dyr, dmon+1, dday,dhr, dmin);
#endif


@@ -26,6 +26,7 @@
#include "divesite.h"
#include "errorhelper.h"
#include "parse.h"
#include "format.h"
#include "subsurface-float.h"
#include "subsurface-string.h"
#include "subsurface-time.h"
@@ -55,7 +56,7 @@ static void divedate(const char *buffer, timestamp_t *when, struct parser_state
} else if (sscanf(buffer, "%d-%d-%d %d:%d:%d", &y, &m, &d, &hh, &mm, &ss) >= 3) {
/* This is also ok */
} else {
fprintf(stderr, "Unable to parse date '%s'\n", buffer);
report_info("Unable to parse date '%s'", buffer);
return;
}
state->cur_tm.tm_year = y;
@@ -165,7 +166,7 @@ static enum number_type parse_float(const char *buffer, double *res, const char
* as this is likely indication of a bug - but right now we don't have
* that information available */
if (first_time) {
fprintf(stderr, "Floating point value with decimal comma (%s)?\n", buffer);
report_info("Floating point value with decimal comma (%s)?", buffer);
first_time = false;
}
/* Try again in permissive mode*/
@@ -217,7 +218,7 @@ static void pressure(const char *buffer, pressure_t *pressure, struct parser_sta
}
/* fallthrough */
default:
printf("Strange pressure reading %s\n", buffer);
report_info("Strange pressure reading %s", buffer);
}
}

@@ -252,7 +253,7 @@ static void salinity(const char *buffer, int *salinity)
*salinity = lrint(val.fp * 10.0);
break;
default:
printf("Strange salinity reading %s\n", buffer);
report_info("Strange salinity reading %s", buffer);
}
}

@@ -272,7 +273,7 @@ static void depth(const char *buffer, depth_t *depth, struct parser_state *state
}
break;
default:
printf("Strange depth reading %s\n", buffer);
report_info("Strange depth reading %s", buffer);
}
}

@@ -305,7 +306,7 @@ static void weight(const char *buffer, weight_t *weight, struct parser_state *st
}
break;
default:
printf("Strange weight reading %s\n", buffer);
report_info("Strange weight reading %s", buffer);
}
}

@@ -328,7 +329,7 @@ static void temperature(const char *buffer, temperature_t *temperature, struct p
}
break;
default:
printf("Strange temperature reading %s\n", buffer);
report_info("Strange temperature reading %s", buffer);
}
/* temperatures outside -40C .. +70C should be ignored */
if (temperature->mkelvin < ZERO_C_IN_MKELVIN - 40000 ||
@@ -357,7 +358,7 @@ static void sampletime(const char *buffer, duration_t *time)
break;
default:
time->seconds = 0;
printf("Strange sample time reading %s\n", buffer);
report_info("Strange sample time reading %s", buffer);
}
}

@@ -411,7 +412,7 @@ static void percent(const char *buffer, fraction_t *fraction)
break;
}
default:
printf(translate("gettextFromC", "Strange percentage reading %s\n"), buffer);
report_info(translate("gettextFromC", "Strange percentage reading %s"), buffer);
break;
}
}
@@ -439,7 +440,7 @@ static void cylindersize(const char *buffer, volume_t *volume)
break;

default:
printf("Strange volume reading %s\n", buffer);
report_info("Strange volume reading %s", buffer);
break;
}
}
@@ -619,7 +620,7 @@ static void fahrenheit(const char *buffer, temperature_t *temperature)
temperature->mkelvin = F_to_mkelvin(val.fp);
break;
default:
fprintf(stderr, "Crazy Diving Log temperature reading %s\n", buffer);
report_info("Crazy Diving Log temperature reading %s", buffer);
}
}

@@ -655,7 +656,7 @@ static void psi_or_bar(const char *buffer, pressure_t *pressure)
pressure->mbar = lrint(val.fp * 1000);
break;
default:
fprintf(stderr, "Crazy Diving Log PSI reading %s\n", buffer);
report_info("Crazy Diving Log PSI reading %s", buffer);
}
}

@@ -1068,7 +1069,7 @@ static void uddf_datetime(const char *buffer, timestamp_t *when, struct parser_s
if (i == 6)
goto success;
bad_date:
printf("Bad date time %s\n", buffer);
report_info("Bad date time %s", buffer);
return;

success:
@@ -1168,7 +1169,7 @@ static void gps_lat(const char *buffer, struct dive *dive, struct parser_state *
add_dive_to_dive_site(dive, create_dive_site_with_gps(NULL, &location, state->log->sites));
} else {
if (ds->location.lat.udeg && ds->location.lat.udeg != location.lat.udeg)
fprintf(stderr, "Oops, changing the latitude of existing dive site id %8x name %s; not good\n", ds->uuid, ds->name ?: "(unknown)");
report_info("Oops, changing the latitude of existing dive site id %8x name %s; not good", ds->uuid, ds->name ?: "(unknown)");
ds->location.lat = location.lat;
}
}
@@ -1184,7 +1185,7 @@ static void gps_long(const char *buffer, struct dive *dive, struct parser_state
add_dive_to_dive_site(dive, create_dive_site_with_gps(NULL, &location, state->log->sites));
} else {
if (ds->location.lon.udeg && ds->location.lon.udeg != location.lon.udeg)
fprintf(stderr, "Oops, changing the longitude of existing dive site id %8x name %s; not good\n", ds->uuid, ds->name ?: "(unknown)");
report_info("Oops, changing the longitude of existing dive site id %8x name %s; not good", ds->uuid, ds->name ?: "(unknown)");
ds->location.lon = location.lon;
}
}
@@ -1225,7 +1226,7 @@ static void gps_in_dive(const char *buffer, struct dive *dive, struct parser_sta
if (dive_site_has_gps_location(ds) &&
has_location(&location) && !same_location(&ds->location, &location)) {
// Houston, we have a problem
fprintf(stderr, "dive site uuid in dive, but gps location (%10.6f/%10.6f) different from dive location (%10.6f/%10.6f)\n",
report_info("dive site uuid in dive, but gps location (%10.6f/%10.6f) different from dive location (%10.6f/%10.6f)",
ds->location.lat.udeg / 1000000.0, ds->location.lon.udeg / 1000000.0,
location.lat.udeg / 1000000.0, location.lon.udeg / 1000000.0);
std::string coords = printGPSCoordsC(&location);
@@ -2218,11 +2219,11 @@ extern "C" int parse_dlf_buffer(unsigned char *buffer, size_t size, struct divel
break;
case 2:
/* Measure He */
//printf("%ds he2 cells(0.01 mV): %d %d\n", time, (ptr[5] << 8) + ptr[4], (ptr[9] << 8) + ptr[8]);
//report_info("%ds he2 cells(0.01 mV): %d %d", time, (ptr[5] << 8) + ptr[4], (ptr[9] << 8) + ptr[8]);
break;
case 3:
/* Measure Oxygen */
//printf("%d s: o2 cells(0.01 mV): %d %d %d %d\n", time, (ptr[5] << 8) + ptr[4], (ptr[7] << 8) + ptr[6], (ptr[9] << 8) + ptr[8], (ptr[11] << 8) + ptr[10]);
//report_info("%d s: o2 cells(0.01 mV): %d %d %d %d", time, (ptr[5] << 8) + ptr[4], (ptr[7] << 8) + ptr[6], (ptr[9] << 8) + ptr[8], (ptr[11] << 8) + ptr[10]);
// [Pa/mV] coeficient O2
// 100 Pa == 1 mbar
sample_start(&state);

@@ -71,6 +71,7 @@ int get_picture_idx(const struct picture_table *t, const char *filename)
return -1;
}

#if !defined(SUBSURFACE_MOBILE)
/* Return distance of timestamp to time of dive. Result is always positive, 0 means during dive. */
static timestamp_t time_from_dive(const struct dive *d, timestamp_t timestamp)
{
@@ -118,7 +119,6 @@ static bool dive_check_picture_time(const struct dive *d, timestamp_t timestamp)
return time_from_dive(d, timestamp) < D30MIN;
}

#if !defined(SUBSURFACE_MOBILE)
/* Creates a picture and indicates the dive to which this picture should be added.
* The caller is responsible for actually adding the picture to the dive.
* If no appropriate dive was found, no picture is created and NULL is returned.

@@ -1,5 +1,5 @@
// SPDX-License-Identifier: GPL-2.0
/* planner.c
/* planner.cpp
*
* code that allows us to plan future dives
*
@@ -26,7 +26,7 @@
#include "qthelper.h"
#include "version.h"

#define TIMESTEP 2 /* second */
static constexpr int base_timestep = 2; // seconds

static int decostoplevels_metric[] = { 0, 3000, 6000, 9000, 12000, 15000, 18000, 21000, 24000, 27000,
30000, 33000, 36000, 39000, 42000, 45000, 48000, 51000, 54000, 57000,
@@ -42,7 +42,7 @@ static int decostoplevels_imperial[] = { 0, 3048, 6096, 9144, 12192, 15240, 1828
325120, 345440, 365760, 386080 };

#if DEBUG_PLAN
void dump_plan(struct diveplan *diveplan)
extern "C" void dump_plan(struct diveplan *diveplan)
{
struct divedatapoint *dp;
struct tm tm;
@@ -59,13 +59,13 @@ void dump_plan(struct diveplan *diveplan)
diveplan->surface_pressure);
dp = diveplan->dp;
while (dp) {
printf("\t%3u:%02u: %6dmm cylid: %2d setpoint: %d\n", FRACTION(dp->time, 60), dp->depth, dp->cylinderid, dp->setpoint);
printf("\t%3u:%02u: %6dmm cylid: %2d setpoint: %d\n", FRACTION_TUPLE(dp->time, 60), dp->depth, dp->cylinderid, dp->setpoint);
dp = dp->next;
}
}
#endif

bool diveplan_empty(struct diveplan *diveplan)
extern "C" bool diveplan_empty(struct diveplan *diveplan)
{
struct divedatapoint *dp;
if (!diveplan || !diveplan->dp)
@@ -80,7 +80,7 @@ bool diveplan_empty(struct diveplan *diveplan)
}

/* get the cylinder index at a certain time during the dive */
int get_cylinderid_at_time(struct dive *dive, struct divecomputer *dc, duration_t time)
extern "C" int get_cylinderid_at_time(struct dive *dive, struct divecomputer *dc, duration_t time)
{
// we start with the first cylinder unless an event tells us otherwise
int cylinder_idx = 0;
@@ -111,9 +111,8 @@ static void interpolate_transition(struct deco_state *ds, struct dive *dive, dur
}

/* returns the tissue tolerance at the end of this (partial) dive */
static int tissue_at_end(struct deco_state *ds, struct dive *dive, struct deco_state **cached_datap)
static int tissue_at_end(struct deco_state *ds, struct dive *dive, const struct divecomputer *dc, deco_state_cache &cache)
{
struct divecomputer *dc;
struct sample *sample, *psample;
int i;
depth_t lastdepth = {};
@@ -123,13 +122,12 @@ static int tissue_at_end(struct deco_state *ds, struct dive *dive, struct deco_s

if (!dive)
return 0;
if (*cached_datap) {
restore_deco_state(*cached_datap, ds, true);
if (cache) {
cache.restore(ds, true);
} else {
surface_interval = init_decompression(ds, dive, true);
cache_deco_state(ds, cached_datap);
cache.cache(ds);
}
dc = &dive->dc;
if (!dc->samples)
return 0;
psample = sample = dc->sample;
@@ -208,10 +206,9 @@ static void update_cylinder_pressure(struct dive *d, int old_depth, int new_dept

/* overwrite the data in dive
* return false if something goes wrong */
static void create_dive_from_plan(struct diveplan *diveplan, struct dive *dive, bool track_gas)
static void create_dive_from_plan(struct diveplan *diveplan, struct dive *dive, struct divecomputer *dc, bool track_gas)
{
struct divedatapoint *dp;
struct divecomputer *dc;
struct sample *sample;
struct event *ev;
cylinder_t *cyl;
@@ -219,7 +216,7 @@ static void create_dive_from_plan(struct diveplan *diveplan, struct dive *dive,
int lasttime = 0, last_manual_point = 0;
depth_t lastdepth = {.mm = 0};
int lastcylid;
enum divemode_t type = dive->dc.divemode;
enum divemode_t type = dc->divemode;

if (!diveplan || !diveplan->dp)
return;
@@ -231,7 +228,6 @@ static void create_dive_from_plan(struct diveplan *diveplan, struct dive *dive,
// reset the cylinders and clear out the samples and events of the
// dive-to-be-planned so we can restart
reset_cylinders(dive, track_gas);
dc = &dive->dc;
dc->when = dive->when = diveplan->when;
dc->surface_pressure.mbar = diveplan->surface_pressure;
dc->salinity = diveplan->salinity;
@@ -319,7 +315,7 @@ static void create_dive_from_plan(struct diveplan *diveplan, struct dive *dive,
finish_sample(dc);
dp = dp->next;
}
dive->dc.last_manual_time.seconds = last_manual_point;
dc->last_manual_time.seconds = last_manual_point;

#if DEBUG_PLAN & 32
save_dive(stdout, dive);
@@ -327,7 +323,7 @@ static void create_dive_from_plan(struct diveplan *diveplan, struct dive *dive,
return;
}

void free_dps(struct diveplan *diveplan)
extern "C" void free_dps(struct diveplan *diveplan)
{
if (!diveplan)
return;
@@ -344,7 +340,7 @@ static struct divedatapoint *create_dp(int time_incr, int depth, int cylinderid,
{
struct divedatapoint *dp;

dp = malloc(sizeof(struct divedatapoint));
dp = (divedatapoint *)malloc(sizeof(struct divedatapoint));
dp->time = time_incr;
dp->depth.mm = depth;
dp->cylinderid = cylinderid;
@@ -371,7 +367,7 @@ static void add_to_end_of_diveplan(struct diveplan *diveplan, struct divedatapoi
dp->time += lasttime;
}

struct divedatapoint *plan_add_segment(struct diveplan *diveplan, int duration, int depth, int cylinderid, int po2, bool entered, enum divemode_t divemode)
extern "C" struct divedatapoint *plan_add_segment(struct diveplan *diveplan, int duration, int depth, int cylinderid, int po2, bool entered, enum divemode_t divemode)
{
struct divedatapoint *dp = create_dp(duration, depth, cylinderid, divemode == CCR ? po2 : 0);
dp->entered = entered;
@@ -401,10 +397,10 @@ static int setpoint_change(struct dive *dive, int cylinderid)
}
}

static struct gaschanges *analyze_gaslist(struct diveplan *diveplan, struct dive *dive, int *gaschangenr, int depth, int *asc_cylinder, bool ccr)
static std::vector<gaschanges> analyze_gaslist(struct diveplan *diveplan, struct dive *dive, int depth, int *asc_cylinder, bool ccr)
{
int nr = 0;
struct gaschanges *gaschanges = NULL;
size_t nr = 0;
std::vector<gaschanges> gaschanges;
struct divedatapoint *dp = diveplan->dp;
struct divedatapoint *best_ascent_dp = NULL;
bool total_time_zero = true;
@@ -413,10 +409,11 @@ static struct gaschanges *analyze_gaslist(struct diveplan *diveplan, struct dive
if (dp->depth.mm <= depth) {
int i = 0;
nr++;
gaschanges = realloc(gaschanges, nr * sizeof(struct gaschanges));
while (i < nr - 1) {
gaschanges.resize(nr);
while (i < static_cast<int>(nr) - 1) {
if (dp->depth.mm < gaschanges[i].depth) {
memmove(gaschanges + i + 1, gaschanges + i, (nr - i - 1) * sizeof(struct gaschanges));
for (int j = static_cast<int>(nr) - 2; j >= i; j--)
gaschanges[j + 1] = gaschanges[j];
break;
}
i++;
@@ -435,12 +432,11 @@ static struct gaschanges *analyze_gaslist(struct diveplan *diveplan, struct dive
}
dp = dp->next;
}
*gaschangenr = nr;
if (best_ascent_dp) {
*asc_cylinder = best_ascent_dp->cylinderid;
}
#if DEBUG_PLAN & 16
for (nr = 0; nr < *gaschangenr; nr++) {
for (size_t nr = 0; nr < gaschanges.size(); nr++) {
int idx = gaschanges[nr].gasidx;
printf("gaschange nr %d: @ %5.2lfm gasidx %d (%s)\n", nr, gaschanges[nr].depth / 1000.0,
idx, gasname(&get_cylinder(&dive, idx)->gasmix));
@@ -450,20 +446,23 @@
}

/* sort all the stops into one ordered list */
static int *sort_stops(int *dstops, int dnr, struct gaschanges *gstops, int gnr)
static std::vector<int> sort_stops(int dstops[], size_t dnr, std::vector<gaschanges> gstops)
{
int i, gi, di;
int total = dnr + gnr;
int *stoplevels = malloc(total * sizeof(int));
int total = dnr + gstops.size();
std::vector<int> stoplevels(total);

/* Can't happen. */
if (dnr == 0)
return std::vector<int>();

/* no gaschanges */
if (gnr == 0) {
memcpy(stoplevels, dstops, dnr * sizeof(int));
if (gstops.empty()) {
std::copy(dstops, dstops + dnr, stoplevels.begin());
return stoplevels;
}
i = total - 1;
gi = gnr - 1;
di = dnr - 1;
int i = static_cast<int>(total) - 1;
int gi = static_cast<int>(gstops.size()) - 1;
int di = static_cast<int>(dnr) - 1;
while (i >= 0) {
if (dstops[di] > gstops[gi].depth) {
stoplevels[i] = dstops[di];
@@ -493,7 +492,7 @@ static int *sort_stops(int *dstops, int dnr, struct gaschanges *gstops, int gnr)

#if DEBUG_PLAN & 16
int k;
for (k = gnr + dnr - 1; k >= 0; k--) {
for (k = static_cast<int>(gstops.size()) + dnr - 1; k >= 0; k--) {
printf("stoplevel[%d]: %5.2lfm\n", k, stoplevels[k] / 1000.0);
if (stoplevels[k] == 0)
break;
@@ -502,9 +501,8 @@ static int *sort_stops(int *dstops, int dnr, struct gaschanges *gstops, int gnr)
return stoplevels;
}

int ascent_velocity(int depth, int avg_depth, int bottom_time)
extern "C" int ascent_velocity(int depth, int avg_depth, int)
{
UNUSED(bottom_time);
/* We need to make this configurable */

/* As an example (and possibly reasonable default) this is the Tech 1 provedure according
@@ -528,10 +526,10 @@ static void track_ascent_gas(int depth, struct dive *dive, int cylinder_id, int
{
cylinder_t *cylinder = get_cylinder(dive, cylinder_id);
while (depth > 0) {
int deltad = ascent_velocity(depth, avg_depth, bottom_time) * TIMESTEP;
int deltad = ascent_velocity(depth, avg_depth, bottom_time) * base_timestep;
if (deltad > depth)
deltad = depth;
update_cylinder_pressure(dive, depth, depth - deltad, TIMESTEP, prefs.decosac, cylinder, true, divemode);
update_cylinder_pressure(dive, depth, depth - deltad, base_timestep, prefs.decosac, cylinder, true, divemode);
if (depth <= 5000 && depth >= (5000 - deltad) && safety_stop) {
update_cylinder_pressure(dive, 5000, 5000, 180, prefs.decosac, cylinder, true, divemode);
safety_stop = false;
@@ -545,12 +543,12 @@ static bool trial_ascent(struct deco_state *ds, int wait_time, int trial_depth,
{

bool clear_to_ascend = true;
struct deco_state *trial_cache = NULL;
deco_state_cache trial_cache;

// For consistency with other VPM-B implementations, we should not start the ascent while the ceiling is
// deeper than the next stop (thus the offgasing during the ascent is ignored).
// However, we still need to make sure we don't break the ceiling due to on-gassing during ascent.
cache_deco_state(ds, &trial_cache);
trial_cache.cache(ds);
if (wait_time)
add_segment(ds, depth_to_bar(trial_depth, dive),
gasmix,
@@ -559,20 +557,19 @@ static bool trial_ascent(struct deco_state *ds, int wait_time, int trial_depth,
double tolerance_limit = tissue_tolerance_calc(ds, dive, depth_to_bar(stoplevel, dive), true);
update_regression(ds, dive);
if (deco_allowed_depth(tolerance_limit, surface_pressure, dive, 1) > stoplevel) {
restore_deco_state(trial_cache, ds, false);
free(trial_cache);
trial_cache.restore(ds, false);
return false;
}
}

while (trial_depth > stoplevel) {
double tolerance_limit;
int deltad = ascent_velocity(trial_depth, avg_depth, bottom_time) * TIMESTEP;
int deltad = ascent_velocity(trial_depth, avg_depth, bottom_time) * base_timestep;
if (deltad > trial_depth) /* don't test against depth above surface */
deltad = trial_depth;
add_segment(ds, depth_to_bar(trial_depth, dive),
gasmix,
TIMESTEP, po2, divemode, prefs.decosac, true);
base_timestep, po2, divemode, prefs.decosac, true);
tolerance_limit = tissue_tolerance_calc(ds, dive, depth_to_bar(trial_depth, dive), true);
if (decoMode(true) == VPMB)
update_regression(ds, dive);
@@ -583,8 +580,7 @@ static bool trial_ascent(struct deco_state *ds, int wait_time, int trial_depth,
}
trial_depth -= deltad;
}
restore_deco_state(trial_cache, ds, false);
free(trial_cache);
trial_cache.restore(ds, false);
return clear_to_ascend;
}

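The trial_ascent() hunks above replace the cache_deco_state()/restore_deco_state()/free() triple with a deco_state_cache object whose cache() and restore() members do the same job and whose destructor makes the explicit free() on every exit path unnecessary. The real class lives elsewhere in the deco code and is not shown in this diff; the following is only a hedged sketch of the idea, assuming deco_state stays a flat, copyable struct and ignoring whatever the bool flag to restore() actually selects.

    #include <memory>

    // Hypothetical illustration, not the actual Subsurface deco_state_cache.
    struct deco_state_cache_sketch {
    	std::unique_ptr<deco_state> snapshot;

    	void cache(const deco_state *ds)
    	{
    		if (!snapshot)
    			snapshot = std::make_unique<deco_state>();
    		*snapshot = *ds;		// keep a copy of the current tissue state
    	}

    	void restore(deco_state *ds, bool /* flag semantics assumed elsewhere */) const
    	{
    		if (snapshot)
    			*ds = *snapshot;	// copy the snapshot back
    	}
    	// No explicit free(): the unique_ptr releases the snapshot when the
    	// cache object goes out of scope, which is why the early return in
    	// trial_ascent() no longer needs cleanup code.
    };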
@@ -655,7 +651,7 @@ static void average_max_depth(struct diveplan *dive, int *avg_depth, int *max_de
*avg_depth = *max_depth = 0;
}

bool plan(struct deco_state *ds, struct diveplan *diveplan, struct dive *dive, int timestep, struct decostop *decostoptable, struct deco_state **cached_datap, bool is_planner, bool show_disclaimer)
bool plan(struct deco_state *ds, struct diveplan *diveplan, struct dive *dive, int dcNr, int timestep, struct decostop *decostoptable, deco_state_cache &cache, bool is_planner, bool show_disclaimer)
{

int bottom_depth;
@@ -664,18 +660,16 @@ bool plan(struct deco_state *ds, struct diveplan *diveplan, struct dive *dive, i
bool is_final_plan = true;
int bottom_time;
int previous_deco_time;
struct deco_state *bottom_cache = NULL;
deco_state_cache bottom_cache;
struct sample *sample;
int po2;
int transitiontime, gi;
int current_cylinder, stop_cylinder;
int stopidx;
size_t stopidx;
int depth;
struct gaschanges *gaschanges = NULL;
int gaschangenr;
int *decostoplevels;
int decostoplevelcount;
int *stoplevels = NULL;
size_t decostoplevelcount;
std::vector<int> stoplevels;
bool stopping = false;
bool pendinggaschange = false;
int clock, previous_point_time;
@@ -686,21 +680,22 @@ bool plan(struct deco_state *ds, struct diveplan *diveplan, struct dive *dive, i
bool o2break_next = false;
int break_cylinder = -1, breakfrom_cylinder = 0;
bool last_segment_min_switch = false;
int error = 0;
bool error = false;
bool decodive = false;
int first_stop_depth = 0;
int laststoptime = timestep;
bool o2breaking = false;
int decostopcounter = 0;
enum divemode_t divemode = dive->dc.divemode;
struct divecomputer *dc = get_dive_dc(dive, dcNr);
enum divemode_t divemode = dc->divemode;

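In the declarations above, raw pointers plus separate counts (int *stoplevels, struct gaschanges * with gaschangenr) become std::vector, counts become size_t, and later hunks take C-array sizes with std::size() instead of the sizeof(a)/sizeof(int) idiom. The payoff is visible further down, where the free(stoplevels), free(gaschanges) and free(bottom_cache) calls on the exit paths are simply dropped. A minimal, self-contained illustration of the pattern (the names and values here are illustrative, not the planner's):

    #include <cstdio>
    #include <iterator>	// std::size
    #include <vector>

    static const int example_stoplevels_metric[] = { 0, 3000, 6000, 9000 };	// made-up values

    std::vector<int> collect_levels(size_t count)
    {
    	std::vector<int> levels;
    	// std::size() yields the element count of a C array directly.
    	for (size_t i = 0; i < std::size(example_stoplevels_metric) && i < count; i++)
    		levels.push_back(example_stoplevels_metric[i]);
    	return levels;	// no matching free() at any return site
    }

    int main()
    {
    	for (int lvl : collect_levels(3))
    		printf("%d mm\n", lvl);
    }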
set_gf(diveplan->gflow, diveplan->gfhigh);
set_vpmb_conservatism(diveplan->vpmb_conservatism);

if (!diveplan->surface_pressure) {
// Lets use dive's surface pressure in planner, if have one...
if (dive->dc.surface_pressure.mbar) { // First from DC...
diveplan->surface_pressure = dive->dc.surface_pressure.mbar;
if (dc->surface_pressure.mbar) { // First from DC...
diveplan->surface_pressure = dc->surface_pressure.mbar;
}
else if (dive->surface_pressure.mbar) { // After from user...
diveplan->surface_pressure = dive->surface_pressure.mbar;
@@ -709,22 +704,23 @@ bool plan(struct deco_state *ds, struct diveplan *diveplan, struct dive *dive, i
diveplan->surface_pressure = SURFACE_PRESSURE;
}
}


clear_deco(ds, dive->surface_pressure.mbar / 1000.0, true);
ds->max_bottom_ceiling_pressure.mbar = ds->first_ceiling_pressure.mbar = 0;
create_dive_from_plan(diveplan, dive, is_planner);
create_dive_from_plan(diveplan, dive, dc, is_planner);

// Do we want deco stop array in metres or feet?
if (prefs.units.length == METERS ) {
if (prefs.units.length == units::METERS ) {
decostoplevels = decostoplevels_metric;
decostoplevelcount = sizeof(decostoplevels_metric) / sizeof(int);
decostoplevelcount = std::size(decostoplevels_metric);
} else {
decostoplevels = decostoplevels_imperial;
decostoplevelcount = sizeof(decostoplevels_imperial) / sizeof(int);
decostoplevelcount = std::size(decostoplevels_imperial);
}

/* If the user has selected last stop to be at 6m/20', we need to get rid of the 3m/10' stop.
* Otherwise reinstate the last stop 3m/10' stop.
* Remark: not reentrant, but the user probably won't change preferences while this is running.
*/
if (prefs.last_stop)
*(decostoplevels + 1) = 0;
@@ -732,20 +728,20 @@ bool plan(struct deco_state *ds, struct diveplan *diveplan, struct dive *dive, i
*(decostoplevels + 1) = M_OR_FT(3,10);

/* Let's start at the last 'sample', i.e. the last manually entered waypoint. */
sample = &dive->dc.sample[dive->dc.samples - 1];
sample = &dc->sample[dc->samples - 1];

/* Keep time during the ascend */
bottom_time = clock = previous_point_time = dive->dc.sample[dive->dc.samples - 1].time.seconds;
bottom_time = clock = previous_point_time = dc->sample[dc->samples - 1].time.seconds;

current_cylinder = get_cylinderid_at_time(dive, &dive->dc, sample->time);
current_cylinder = get_cylinderid_at_time(dive, dc, sample->time);
// Find the divemode at the end of the dive
const struct event *ev = NULL;
divemode = UNDEF_COMP_TYPE;
divemode = get_current_divemode(&dive->dc, bottom_time, &ev, &divemode);
divemode = get_current_divemode(dc, bottom_time, &ev, &divemode);
gas = get_cylinder(dive, current_cylinder)->gasmix;

po2 = sample->setpoint.mbar;
depth = dive->dc.sample[dive->dc.samples - 1].depth.mm;
depth = dc->sample[dc->samples - 1].depth.mm;
average_max_depth(diveplan, &avg_depth, &max_depth);
last_ascend_rate = ascent_velocity(depth, avg_depth, bottom_time);

@@ -756,7 +752,7 @@ bool plan(struct deco_state *ds, struct diveplan *diveplan, struct dive *dive, i
*/
transitiontime = lrint(depth / (double)prefs.ascratelast6m);
plan_add_segment(diveplan, transitiontime, 0, current_cylinder, po2, false, divemode);
create_dive_from_plan(diveplan, dive, is_planner);
create_dive_from_plan(diveplan, dive, dc, is_planner);
return false;
}

@@ -768,7 +764,7 @@ bool plan(struct deco_state *ds, struct diveplan *diveplan, struct dive *dive, i

/* Find the gases available for deco */

gaschanges = analyze_gaslist(diveplan, dive, &gaschangenr, depth, &best_first_ascend_cylinder, divemode == CCR && !prefs.dobailout);
std::vector<gaschanges> gaschanges = analyze_gaslist(diveplan, dive, depth, &best_first_ascend_cylinder, divemode == CCR && !prefs.dobailout);

/* Find the first potential decostopdepth above current depth */
for (stopidx = 0; stopidx < decostoplevelcount; stopidx++)
@@ -777,13 +773,13 @@ bool plan(struct deco_state *ds, struct diveplan *diveplan, struct dive *dive, i
if (stopidx > 0)
stopidx--;
/* Stoplevels are either depths of gas changes or potential deco stop depths. */
stoplevels = sort_stops(decostoplevels, stopidx + 1, gaschanges, gaschangenr);
stopidx += gaschangenr;
stoplevels = sort_stops(decostoplevels, stopidx + 1, gaschanges);
stopidx += gaschanges.size();

gi = gaschangenr - 1;
gi = static_cast<int>(gaschanges.size()) - 1;

/* Set tissue tolerance and initial vpmb gradient at start of ascent phase */
diveplan->surface_interval = tissue_at_end(ds, dive, cached_datap);
diveplan->surface_interval = tissue_at_end(ds, dive, dc, cache);
nuclear_regeneration(ds, clock);
vpmb_start_gradient(ds);
if (decoMode(true) == RECREATIONAL) {
@@ -800,7 +796,7 @@ bool plan(struct deco_state *ds, struct diveplan *diveplan, struct dive *dive, i
po2, diveplan->surface_pressure / 1000.0, dive, divemode) &&
enough_gas(dive, current_cylinder) && clock < 6 * 3600);

// We did stay one DECOTIMESTEP too many.
// We did stay one timestep too many.
// In the best of all worlds, we would roll back also the last add_segment in terms of caching deco state, but
// let's ignore that since for the eventual ascent in recreational mode, nobody looks at the ceiling anymore,
// so we don't really have to compute the deco state.
@@ -810,7 +806,7 @@ bool plan(struct deco_state *ds, struct diveplan *diveplan, struct dive *dive, i
previous_point_time = clock;
do {
/* Ascend to surface */
int deltad = ascent_velocity(depth, avg_depth, bottom_time) * TIMESTEP;
int deltad = ascent_velocity(depth, avg_depth, bottom_time) * base_timestep;
if (ascent_velocity(depth, avg_depth, bottom_time) != last_ascend_rate) {
plan_add_segment(diveplan, clock - previous_point_time, depth, current_cylinder, po2, false, divemode);
previous_point_time = clock;
@@ -819,7 +815,7 @@ bool plan(struct deco_state *ds, struct diveplan *diveplan, struct dive *dive, i
if (depth - deltad < 0)
deltad = depth;

clock += TIMESTEP;
clock += base_timestep;
depth -= deltad;
if (depth <= 5000 && depth >= (5000 - deltad) && safety_stop) {
plan_add_segment(diveplan, clock - previous_point_time, 5000, current_cylinder, po2, false, divemode);
@@ -831,12 +827,10 @@ bool plan(struct deco_state *ds, struct diveplan *diveplan, struct dive *dive, i
}
} while (depth > 0);
plan_add_segment(diveplan, clock - previous_point_time, 0, current_cylinder, po2, false, divemode);
create_dive_from_plan(diveplan, dive, is_planner);
create_dive_from_plan(diveplan, dive, dc, is_planner);
add_plan_to_notes(diveplan, dive, show_disclaimer, error);
fixup_dc_duration(&dive->dc);
fixup_dc_duration(dc);

free(stoplevels);
free(gaschanges);
return false;
}

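The recreational-mode loop above ascends in fixed time steps: each iteration raises the diver by ascent_velocity(...) multiplied by the timestep, clamps the result at the surface, and adds a 180-second segment when the 5000 mm (5 m) band is crossed and a safety stop is wanted. A worked example of just that arithmetic, with purely illustrative numbers (the real rate comes from ascent_velocity() and the real step from the planner's timestep):

    // Illustrative only: step-wise ascent, depths in mm, times in seconds.
    int depth = 10000;		// start at 10 m
    const int rate = 150;	// assumed ascent rate in mm/s
    const int step = 2;		// assumed timestep in seconds
    int clock_s = 0;
    while (depth > 0) {
    	int deltad = rate * step;	// 300 mm per iteration
    	if (deltad > depth)
    		deltad = depth;		// don't overshoot the surface
    	clock_s += step;
    	depth -= deltad;
    }
    // 10000 mm / 300 mm per step -> 34 iterations, so clock_s ends at 68 s.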
@@ -851,21 +845,20 @@ bool plan(struct deco_state *ds, struct diveplan *diveplan, struct dive *dive, i
}

// VPM-B or Buehlmann Deco
tissue_at_end(ds, dive, cached_datap);
tissue_at_end(ds, dive, dc, cache);
if ((divemode == CCR || divemode == PSCR) && prefs.dobailout) {
divemode = OC;
po2 = 0;
int bailoutsegment = MAX(prefs.min_switch_duration, 60 * prefs.problemsolvingtime);
int bailoutsegment = std::max(prefs.min_switch_duration, 60 * prefs.problemsolvingtime);
add_segment(ds, depth_to_bar(depth, dive),
get_cylinder(dive, current_cylinder)->gasmix,
bailoutsegment, po2, divemode, prefs.bottomsac, true);
plan_add_segment(diveplan, bailoutsegment, depth, current_cylinder, po2, false, divemode);
bottom_time += bailoutsegment;
last_segment_min_switch = true;
}
previous_deco_time = 100000000;
ds->deco_time = 10000000;
cache_deco_state(ds, &bottom_cache); // Lets us make several iterations
bottom_cache.cache(ds); // Lets us make several iterations
bottom_depth = depth;
bottom_gi = gi;
bottom_gas = gas;
@@ -879,7 +872,7 @@ bool plan(struct deco_state *ds, struct diveplan *diveplan, struct dive *dive, i
vpmb_next_gradient(ds, ds->deco_time, diveplan->surface_pressure / 1000.0, true);

previous_deco_time = ds->deco_time;
restore_deco_state(bottom_cache, ds, true);
bottom_cache.restore(ds, true);

depth = bottom_depth;
gi = bottom_gi;
@@ -912,7 +905,7 @@ bool plan(struct deco_state *ds, struct diveplan *diveplan, struct dive *dive, i
/* We will break out when we hit the surface */
do {
/* Ascend to next stop depth */
int deltad = ascent_velocity(depth, avg_depth, bottom_time) * TIMESTEP;
int deltad = ascent_velocity(depth, avg_depth, bottom_time) * base_timestep;
if (ascent_velocity(depth, avg_depth, bottom_time) != last_ascend_rate) {
if (is_final_plan)
plan_add_segment(diveplan, clock - previous_point_time, depth, current_cylinder, po2, false, divemode);
@@ -925,11 +918,11 @@ bool plan(struct deco_state *ds, struct diveplan *diveplan, struct dive *dive, i

add_segment(ds, depth_to_bar(depth, dive),
get_cylinder(dive, current_cylinder)->gasmix,
TIMESTEP, po2, divemode, prefs.decosac, true);
base_timestep, po2, divemode, prefs.decosac, true);
last_segment_min_switch = false;
clock += TIMESTEP;
clock += base_timestep;
depth -= deltad;
/* Print VPM-Gradient as gradient factor, this has to be done from within deco.c */
/* Print VPM-Gradient as gradient factor, this has to be done from within deco.cpp */
if (decodive)
ds->plot_depth = depth;
} while (depth > 0 && depth > stoplevels[stopidx]);
@@ -955,10 +948,10 @@ bool plan(struct deco_state *ds, struct diveplan *diveplan, struct dive *dive, i
stopping = true;
previous_point_time = clock;
current_cylinder = gaschanges[gi].gasidx;
gas = get_cylinder(dive, current_cylinder)->gasmix;
if (divemode == CCR)
po2 = setpoint_change(dive, current_cylinder);
#if DEBUG_PLAN & 16
gas = get_cylinder(dive, current_cylinder)->gasmix;
printf("switch to gas %d (%d/%d) @ %5.2lfm\n", gaschanges[gi].gasidx,
(get_o2(&gas) + 5) / 10, (get_he(&gas) + 5) / 10, gaschanges[gi].depth / 1000.0);
#endif
@@ -1011,10 +1004,10 @@ bool plan(struct deco_state *ds, struct diveplan *diveplan, struct dive *dive, i
*/
if (pendinggaschange) {
current_cylinder = gaschanges[gi + 1].gasidx;
gas = get_cylinder(dive, current_cylinder)->gasmix;
if (divemode == CCR)
po2 = setpoint_change(dive, current_cylinder);
#if DEBUG_PLAN & 16
gas = get_cylinder(dive, current_cylinder)->gasmix;
printf("switch to gas %d (%d/%d) @ %5.2lfm\n", gaschanges[gi + 1].gasidx,
(get_o2(&gas) + 5) / 10, (get_he(&gas) + 5) / 10, gaschanges[gi + 1].depth / 1000.0);
#endif
@@ -1024,7 +1017,6 @@ bool plan(struct deco_state *ds, struct diveplan *diveplan, struct dive *dive, i
get_cylinder(dive, current_cylinder)->gasmix,
prefs.min_switch_duration, po2, divemode, prefs.decosac, true);
clock += prefs.min_switch_duration;
last_segment_min_switch = true;
}
pendinggaschange = false;
}
@@ -1034,7 +1026,7 @@ bool plan(struct deco_state *ds, struct diveplan *diveplan, struct dive *dive, i
laststoptime = new_clock - clock;
/* Finish infinite deco */
if (laststoptime >= 48 * 3600 && depth >= 6000) {
error = LONGDECO;
error = true;
break;
}

@@ -1061,7 +1053,6 @@ bool plan(struct deco_state *ds, struct diveplan *diveplan, struct dive *dive, i
plan_add_segment(diveplan, laststoptime, depth, current_cylinder, po2, false, divemode);
previous_point_time = clock + laststoptime;
current_cylinder = break_cylinder;
gas = get_cylinder(dive, current_cylinder)->gasmix;
}
} else if (o2break_next) {
if (laststoptime >= 6 * 60) {
@@ -1073,7 +1064,6 @@ bool plan(struct deco_state *ds, struct diveplan *diveplan, struct dive *dive, i
plan_add_segment(diveplan, laststoptime, depth, current_cylinder, po2, false, divemode);
previous_point_time = clock + laststoptime;
current_cylinder = breakfrom_cylinder;
gas = get_cylinder(dive, current_cylinder)->gasmix;
}
}
}
@@ -1119,13 +1109,10 @@ bool plan(struct deco_state *ds, struct diveplan *diveplan, struct dive *dive, i
current_cylinder = dive->cylinders.nr;
plan_add_segment(diveplan, prefs.surface_segment, 0, current_cylinder, 0, false, OC);
}
create_dive_from_plan(diveplan, dive, is_planner);
create_dive_from_plan(diveplan, dive, dc, is_planner);
add_plan_to_notes(diveplan, dive, show_disclaimer, error);
fixup_dc_duration(&dive->dc);
fixup_dc_duration(dc);

free(stoplevels);
free(gaschanges);
free(bottom_cache);
return decodive;
}

@@ -1179,7 +1166,7 @@ static int get_permille(const char *begin, const char **end)
return value;
}

int validate_gas(const char *text, struct gasmix *gas)
extern "C" int validate_gas(const char *text, struct gasmix *gas)
{
int o2, he;

@@ -1226,7 +1213,7 @@ int validate_gas(const char *text, struct gasmix *gas)
return 1;
}

int validate_po2(const char *text, int *mbar_po2)
extern "C" int validate_po2(const char *text, int *mbar_po2)
{
int po2;

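With planner.c now compiled as C++, the definitions of functions that are still called from C code (ascent_velocity(), validate_gas(), validate_po2() above) gain extern "C" so the compiler emits unmangled symbols that match the declarations inside the extern "C" block of planner.h. A generic sketch of that pattern, with example names that are not from this codebase:

    /* shared header, usable from C and C++ (sketch) */
    #ifdef __cplusplus
    extern "C" {
    #endif
    int example_validate(const char *text);	/* C linkage either way */
    #ifdef __cplusplus
    }
    #endif

    /* implementation file compiled as C++ */
    extern "C" int example_validate(const char *text)
    {
    	return text && text[0] != '\0';	/* the body may freely use C++ internally */
    }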
@@ -2,14 +2,9 @@
#ifndef PLANNER_H
#define PLANNER_H

#define LONGDECO 1
#define NOT_RECREATIONAL 2

#include "units.h"
#include "divemode.h"

#define DECOTIMESTEP 60 /* seconds. Unit of deco stop times */

/* this should be converted to use our types */
struct divedatapoint {
int time;
@@ -36,6 +31,8 @@ struct diveplan {
int surface_interval;
};

struct deco_state_cache;

#ifdef __cplusplus
extern "C" {
#endif
@@ -44,9 +41,8 @@ extern int validate_gas(const char *text, struct gasmix *gas);
extern int validate_po2(const char *text, int *mbar_po2);
extern int get_cylinderid_at_time(struct dive *dive, struct divecomputer *dc, duration_t time);
extern bool diveplan_empty(struct diveplan *diveplan);
extern void add_plan_to_notes(struct diveplan *diveplan, struct dive *dive, bool show_disclaimer, int error);
extern void add_plan_to_notes(struct diveplan *diveplan, struct dive *dive, bool show_disclaimer, bool error);
extern const char *get_planner_disclaimer();
extern char *get_planner_disclaimer_formatted();

extern void free_dps(struct diveplan *diveplan);

@@ -58,9 +54,12 @@ struct decostop {
int depth;
int time;
};
extern bool plan(struct deco_state *ds, struct diveplan *diveplan, struct dive *dive, int timestep, struct decostop *decostoptable, struct deco_state **cached_datap, bool is_planner, bool show_disclaimer);

#ifdef __cplusplus
}

#include <string>
extern std::string get_planner_disclaimer_formatted();
extern bool plan(struct deco_state *ds, struct diveplan *diveplan, struct dive *dive, int dcNr, int timestep, struct decostop *decostoptable, deco_state_cache &cache, bool is_planner, bool show_disclaimer);
#endif
#endif // PLANNER_H

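The planner.h hunks above split the header into a C-compatible part and a C++-only part: get_planner_disclaimer_formatted() and plan() move below the closing brace of the extern "C" block, because their new signatures use std::string and a deco_state_cache reference, which C cannot see; the #include <string> sits inside the #ifdef __cplusplus for the same reason. A compact sketch of that layout, with illustrative names rather than the real declarations:

    /* sketch of a header shared between C and C++ callers */
    #ifdef __cplusplus
    extern "C" {
    #endif

    /* C-compatible API: plain types only */
    extern int example_validate_po2(const char *text, int *mbar_po2);

    #ifdef __cplusplus
    }	/* end extern "C" */

    /* C++-only API: may use std::string, references, overloads, ... */
    #include <string>
    struct example_cache;
    extern std::string example_disclaimer_formatted();
    extern bool example_plan(example_cache &cache, int timestep);
    #endif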
@@ -1,7 +1,7 @@
// SPDX-License-Identifier: GPL-2.0
/* planner.c
/* plannernotes.cpp
*
* code that allows us to plan future dives
* format notes describing a dive plan
*
* (c) Dirk Hohndel 2013
*/
@@ -19,8 +19,8 @@
#include "libdivecomputer/parser.h"
#include "qthelper.h"
#include "format.h"
#include "subsurface-string.h"
#include "version.h"
#include "membuffer.h"

static int diveplan_duration(struct diveplan *diveplan)
{
@@ -47,21 +47,22 @@ static int diveplan_duration(struct diveplan *diveplan)
* 5) Pointers to gas mixes in the gas change: gas-from and gas-to.
* Returns: The size of the output buffer that has been used after the new results have been added.
*/
static void add_icd_entry(struct membuffer *b, struct icd_data *icdvalues, bool printheader, int time_seconds, int ambientpressure_mbar, struct gasmix gas_from, struct gasmix gas_to)
static std::string icd_entry(struct icd_data *icdvalues, bool printheader, int time_seconds, int ambientpressure_mbar, struct gasmix gas_from, struct gasmix gas_to)
{
std::string b;
if (printheader) { // Create a table description and a table header if no icd data have been written yet.
put_format(b, "<div>%s:", translate("gettextFromC","Isobaric counterdiffusion information"));
put_format(b, "<table><tr><td align='left'><b>%s</b></td>", translate("gettextFromC", "runtime"));
put_format(b, "<td align='center'><b>%s</b></td>", translate("gettextFromC", "gaschange"));
put_format(b, "<td style='padding-left: 15px;'><b>%s</b></td>", translate("gettextFromC", "ΔHe"));
put_format(b, "<td style='padding-left: 20px;'><b>%s</b></td>", translate("gettextFromC", "ΔN₂"));
put_format(b, "<td style='padding-left: 10px;'><b>%s</b></td></tr>", translate("gettextFromC", "max ΔN₂"));
b += format_string_std("<div>%s:", translate("gettextFromC","Isobaric counterdiffusion information"));
b += format_string_std("<table><tr><td align='left'><b>%s</b></td>", translate("gettextFromC", "runtime"));
b += format_string_std("<td align='center'><b>%s</b></td>", translate("gettextFromC", "gaschange"));
b += format_string_std("<td style='padding-left: 15px;'><b>%s</b></td>", translate("gettextFromC", "ΔHe"));
b += format_string_std("<td style='padding-left: 20px;'><b>%s</b></td>", translate("gettextFromC", "ΔN₂"));
b += format_string_std("<td style='padding-left: 10px;'><b>%s</b></td></tr>", translate("gettextFromC", "max ΔN₂"));
} // Add one entry to the icd table:
put_format_loc(b,
b += casprintf_loc(
"<tr><td rowspan='2' style= 'vertical-align:top;'>%3d%s</td>"
"<td rowspan=2 style= 'vertical-align:top;'>%s➙",
(time_seconds + 30) / 60, translate("gettextFromC", "min"), gasname(gas_from));
put_format_loc(b,
b += casprintf_loc(
"%s</td><td style='padding-left: 10px;'>%+5.1f%%</td>"
"<td style= 'padding-left: 15px; color:%s;'>%+5.1f%%</td>"
"<td style='padding-left: 15px;'>%+5.1f%%</td></tr>"
@@ -73,9 +74,10 @@ static void add_icd_entry(struct membuffer *b, struct icd_data *icdvalues, bool
ambientpressure_mbar * icdvalues->dHe / 1e6f, translate("gettextFromC", "bar"), ((5 * icdvalues->dN2) > -icdvalues->dHe) ? "red" : "#383838",
ambientpressure_mbar * icdvalues->dN2 / 1e6f, translate("gettextFromC", "bar"),
ambientpressure_mbar * -icdvalues->dHe / 5e6f, translate("gettextFromC", "bar"));
return b;
}

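In the new icd_entry() above, the membuffer output parameter becomes a local std::string returned by value, and the put_format()/put_format_loc() calls turn into += of format_string_std() and casprintf_loc(), which (as used throughout this diff) return printf-style formatted std::strings. The real helpers live in format.h / subsurface-string.h and may differ in detail; the following is only a sketch of how such a helper can be layered on vsnprintf, plus the append-and-return pattern the function now follows:

    #include <cstdarg>
    #include <cstdio>
    #include <string>

    // Sketch only: printf-style formatting into a std::string.
    static std::string format_string_sketch(const char *fmt, ...)
    {
    	va_list ap, ap2;
    	va_start(ap, fmt);
    	va_copy(ap2, ap);
    	int len = vsnprintf(nullptr, 0, fmt, ap);	// measure the output
    	va_end(ap);
    	std::string res;
    	if (len > 0) {
    		res.resize(len + 1);
    		vsnprintf(&res[0], len + 1, fmt, ap2);	// format into the buffer
    		res.resize(len);			// drop the trailing NUL
    	}
    	va_end(ap2);
    	return res;
    }

    // Usage mirroring icd_entry(): build a row, return it, let the caller append it.
    std::string icd_row_sketch(int runtime_min)
    {
    	std::string row;
    	row += format_string_sketch("<tr><td>%3d min</td></tr>", runtime_min);
    	return row;
    }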
const char *get_planner_disclaimer()
extern "C" const char *get_planner_disclaimer()
{
return translate("gettextFromC", "DISCLAIMER / WARNING: THIS IMPLEMENTATION OF THE %s "
"ALGORITHM AND A DIVE PLANNER IMPLEMENTATION BASED ON THAT HAS "
@@ -84,19 +86,17 @@ const char *get_planner_disclaimer()
}

/* Returns newly allocated buffer. Must be freed by caller */
char *get_planner_disclaimer_formatted()
extern std::string get_planner_disclaimer_formatted()
{
struct membuffer buf = { 0 };
const char *deco = decoMode(true) == VPMB ? translate("gettextFromC", "VPM-B")
: translate("gettextFromC", "BUHLMANN");
put_format(&buf, get_planner_disclaimer(), deco);
return detach_cstring(&buf);
return format_string_std(get_planner_disclaimer(), deco);
}

void add_plan_to_notes(struct diveplan *diveplan, struct dive *dive, bool show_disclaimer, int error)
extern "C" void add_plan_to_notes(struct diveplan *diveplan, struct dive *dive, bool show_disclaimer, bool error)
{
struct membuffer buf = { 0 };
struct membuffer icdbuf = { 0 };
std::string buf;
std::string icdbuf;
const char *segmentsymbol;
int lastdepth = 0, lasttime = 0, lastsetpoint = -1, newdepth = 0, lastprintdepth = 0, lastprintsetpoint = -1;
struct gasmix lastprintgasmix = gasmix_invalid;
@@ -115,69 +115,68 @@ void add_plan_to_notes(struct diveplan *diveplan, struct dive *dive, bool show_d
struct divedatapoint *nextdp = NULL;
struct divedatapoint *lastbottomdp = NULL;
struct icd_data icdvalues;
char *temp;

if (!dp)
return;

if (error) {
put_format(&buf, "<span style='color: red;'>%s </span> %s<br/>",
buf += format_string_std("<span style='color: red;'>%s </span> %s<br/>",
translate("gettextFromC", "Warning:"),
translate("gettextFromC", "Decompression calculation aborted due to excessive time"));
goto finished;
// TODO: avoid copy
free(dive->notes);
dive->notes = strdup(buf.c_str());
return;
}

if (show_disclaimer) {
char *disclaimer = get_planner_disclaimer_formatted();
put_string(&buf, "<div><b>");
put_string(&buf, disclaimer);
put_string(&buf, "</b><br/>\n</div>\n");
free(disclaimer);
buf += "<div><b>";
buf += get_planner_disclaimer_formatted();
buf += "</b><br/>\n</div>\n";
}

put_string(&buf, "<div>\n<b>");
|
||||
buf += "<div>\n<b>";
|
||||
if (diveplan->surface_interval < 0) {
|
||||
put_format(&buf, "%s (%s) %s",
|
||||
buf += format_string_std("%s (%s) %s",
|
||||
translate("gettextFromC", "Subsurface"),
|
||||
subsurface_canonical_version(),
|
||||
translate("gettextFromC", "dive plan</b> (overlapping dives detected)"));
|
||||
goto finished;
|
||||
// TODO: avoid copy
|
||||
free(dive->notes);
|
||||
dive->notes = strdup(buf.c_str());
|
||||
return;
|
||||
} else if (diveplan->surface_interval >= 48 * 60 *60) {
|
||||
char *current_date = get_current_date();
|
||||
put_format(&buf, "%s (%s) %s %s",
|
||||
buf += format_string_std("%s (%s) %s %s",
|
||||
translate("gettextFromC", "Subsurface"),
|
||||
subsurface_canonical_version(),
|
||||
translate("gettextFromC", "dive plan</b> created on"),
|
||||
current_date);
|
||||
free(current_date);
|
||||
get_current_date().c_str());
|
||||
} else {
|
||||
char *current_date = get_current_date();
|
||||
put_format_loc(&buf, "%s (%s) %s %d:%02d) %s %s",
|
||||
buf += casprintf_loc("%s (%s) %s %d:%02d) %s %s",
|
||||
translate("gettextFromC", "Subsurface"),
|
||||
subsurface_canonical_version(),
|
||||
translate("gettextFromC", "dive plan</b> (surface interval "),
|
||||
FRACTION(diveplan->surface_interval / 60, 60),
|
||||
FRACTION_TUPLE(diveplan->surface_interval / 60, 60),
|
||||
translate("gettextFromC", "created on"),
|
||||
current_date);
|
||||
free(current_date);
|
||||
get_current_date().c_str());
|
||||
}
|
||||
put_string(&buf, "<br/>\n");
|
||||
buf += "<br/>\n";
|
||||
|
||||
if (prefs.display_variations && decoMode(true) != RECREATIONAL)
|
||||
put_format_loc(&buf, translate("gettextFromC", "Runtime: %dmin%s"),
|
||||
buf += casprintf_loc(translate("gettextFromC", "Runtime: %dmin%s"),
|
||||
diveplan_duration(diveplan), "VARIATIONS");
|
||||
else
|
||||
put_format_loc(&buf, translate("gettextFromC", "Runtime: %dmin%s"),
|
||||
buf += casprintf_loc(translate("gettextFromC", "Runtime: %dmin%s"),
|
||||
diveplan_duration(diveplan), "");
|
||||
put_string(&buf, "<br/>\n</div>\n");
|
||||
buf += "<br/>\n</div>\n";
|
||||
|
||||
if (!plan_verbatim) {
|
||||
put_format(&buf, "<table>\n<thead>\n<tr><th></th><th>%s</th>", translate("gettextFromC", "depth"));
|
||||
buf += format_string_std("<table>\n<thead>\n<tr><th></th><th>%s</th>", translate("gettextFromC", "depth"));
|
||||
if (plan_display_duration)
|
||||
put_format(&buf, "<th style='padding-left: 10px;'>%s</th>", translate("gettextFromC", "duration"));
|
||||
buf += format_string_std("<th style='padding-left: 10px;'>%s</th>", translate("gettextFromC", "duration"));
|
||||
if (plan_display_runtime)
|
||||
put_format(&buf, "<th style='padding-left: 10px;'>%s</th>", translate("gettextFromC", "runtime"));
|
||||
put_format(&buf, "<th style='padding-left: 10px; float: left;'>%s</th></tr>\n</thead>\n<tbody style='float: left;'>\n",
|
||||
buf += format_string_std("<th style='padding-left: 10px;'>%s</th>", translate("gettextFromC", "runtime"));
|
||||
buf += format_string_std("<th style='padding-left: 10px; float: left;'>%s</th></tr>\n</thead>\n<tbody style='float: left;'>\n",
|
||||
translate("gettextFromC", "gas"));
|
||||
}
|
||||
|
||||
@@ -232,44 +231,44 @@ void add_plan_to_notes(struct diveplan *diveplan, struct dive *dive, bool show_d
if (dp->depth.mm != lastprintdepth) {
if (plan_display_transitions || dp->entered || !dp->next || (gaschange_after && dp->next && dp->depth.mm != nextdp->depth.mm)) {
if (dp->setpoint) {
put_format_loc(&buf, translate("gettextFromC", "%s to %.*f %s in %d:%02d min - runtime %d:%02u on %s (SP = %.1fbar)"),
buf += casprintf_loc(translate("gettextFromC", "%s to %.*f %s in %d:%02d min - runtime %d:%02u on %s (SP = %.1fbar)"),
dp->depth.mm < lastprintdepth ? translate("gettextFromC", "Ascend") : translate("gettextFromC", "Descend"),
decimals, depthvalue, depth_unit,
FRACTION(dp->time - lasttime, 60),
FRACTION(dp->time, 60),
FRACTION_TUPLE(dp->time - lasttime, 60),
FRACTION_TUPLE(dp->time, 60),
gasname(gasmix),
(double) dp->setpoint / 1000.0);
} else {
put_format_loc(&buf, translate("gettextFromC", "%s to %.*f %s in %d:%02d min - runtime %d:%02u on %s"),
buf += casprintf_loc(translate("gettextFromC", "%s to %.*f %s in %d:%02d min - runtime %d:%02u on %s"),
dp->depth.mm < lastprintdepth ? translate("gettextFromC", "Ascend") : translate("gettextFromC", "Descend"),
decimals, depthvalue, depth_unit,
FRACTION(dp->time - lasttime, 60),
FRACTION(dp->time, 60),
FRACTION_TUPLE(dp->time - lasttime, 60),
FRACTION_TUPLE(dp->time, 60),
gasname(gasmix));
}

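The FRACTION(...) to FRACTION_TUPLE(...) substitutions in these format calls keep the behaviour unchanged: the macro expands to two comma-separated printf arguments, the quotient and the remainder, which feed a "%d:%02d"-style pair of conversions. The definition below is only a sketch of that idea, not necessarily the exact macro in the Subsurface headers:

    #include <cstdio>

    /* Sketch: expands to two printf arguments, e.g. minutes and seconds. */
    #define FRACTION_TUPLE_SKETCH(n, x) ((unsigned)(n) / (x)), ((unsigned)(n) % (x))

    int main()
    {
    	int seconds = 754;
    	/* prints "12:34" */
    	printf("%u:%02u\n", FRACTION_TUPLE_SKETCH(seconds, 60));
    }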
put_string(&buf, "<br/>\n");
|
||||
buf += "<br/>\n";
|
||||
}
|
||||
newdepth = dp->depth.mm;
|
||||
lasttime = dp->time;
|
||||
} else {
|
||||
if ((nextdp && dp->depth.mm != nextdp->depth.mm) || gaschange_after) {
|
||||
if (dp->setpoint) {
|
||||
put_format_loc(&buf, translate("gettextFromC", "Stay at %.*f %s for %d:%02d min - runtime %d:%02u on %s (SP = %.1fbar CCR)"),
|
||||
buf += casprintf_loc(translate("gettextFromC", "Stay at %.*f %s for %d:%02d min - runtime %d:%02u on %s (SP = %.1fbar CCR)"),
|
||||
decimals, depthvalue, depth_unit,
|
||||
FRACTION(dp->time - lasttime, 60),
|
||||
FRACTION(dp->time, 60),
|
||||
FRACTION_TUPLE(dp->time - lasttime, 60),
|
||||
FRACTION_TUPLE(dp->time, 60),
|
||||
gasname(gasmix),
|
||||
(double) dp->setpoint / 1000.0);
|
||||
} else {
|
||||
put_format_loc(&buf, translate("gettextFromC", "Stay at %.*f %s for %d:%02d min - runtime %d:%02u on %s %s"),
|
||||
buf += casprintf_loc(translate("gettextFromC", "Stay at %.*f %s for %d:%02d min - runtime %d:%02u on %s %s"),
|
||||
decimals, depthvalue, depth_unit,
|
||||
FRACTION(dp->time - lasttime, 60),
|
||||
FRACTION(dp->time, 60),
|
||||
FRACTION_TUPLE(dp->time - lasttime, 60),
|
||||
FRACTION_TUPLE(dp->time, 60),
|
||||
gasname(gasmix),
|
||||
translate("gettextFromC", divemode_text_ui[dp->divemode]));
|
||||
}
|
||||
put_string(&buf, "<br/>\n");
|
||||
buf += "<br/>\n";
|
||||
newdepth = dp->depth.mm;
|
||||
lasttime = dp->time;
|
||||
}
|
||||
@ -307,20 +306,17 @@ void add_plan_to_notes(struct diveplan *diveplan, struct dive *dive, bool show_d
|
||||
else
|
||||
segmentsymbol = "-"; // minus sign (a.k.a. horizontal line) for deco stop
|
||||
|
||||
put_format(&buf, "<tr><td style='padding-left: 10px; float: right;'>%s</td>", segmentsymbol);
|
||||
buf += format_string_std("<tr><td style='padding-left: 10px; float: right;'>%s</td>", segmentsymbol);
|
||||
|
||||
asprintf_loc(&temp, translate("gettextFromC", "%3.0f%s"), depthvalue, depth_unit);
|
||||
put_format(&buf, "<td style='padding-left: 10px; float: right;'>%s</td>", temp);
|
||||
free(temp);
|
||||
std::string temp = casprintf_loc(translate("gettextFromC", "%3.0f%s"), depthvalue, depth_unit);
|
||||
buf += format_string_std("<td style='padding-left: 10px; float: right;'>%s</td>", temp.c_str());
|
||||
if (plan_display_duration) {
|
||||
asprintf_loc(&temp, translate("gettextFromC", "%3dmin"), (dp->time - lasttime + 30) / 60);
|
||||
put_format(&buf, "<td style='padding-left: 10px; float: right;'>%s</td>", temp);
|
||||
free(temp);
|
||||
temp = casprintf_loc(translate("gettextFromC", "%3dmin"), (dp->time - lasttime + 30) / 60);
|
||||
buf += format_string_std("<td style='padding-left: 10px; float: right;'>%s</td>", temp.c_str());
|
||||
}
|
||||
if (plan_display_runtime) {
|
||||
asprintf_loc(&temp, translate("gettextFromC", "%3dmin"), (dp->time + 30) / 60);
|
||||
put_format(&buf, "<td style='padding-left: 10px; float: right;'>%s</td>", temp);
|
||||
free(temp);
|
||||
temp = casprintf_loc(translate("gettextFromC", "%3dmin"), (dp->time + 30) / 60);
|
||||
buf += format_string_std("<td style='padding-left: 10px; float: right;'>%s</td>", temp.c_str());
|
||||
}
|
||||
|
||||
/* Normally a gas change is displayed on the stopping segment, so only display a gas change at the end of
@@ -328,18 +324,17 @@ void add_plan_to_notes(struct diveplan *diveplan, struct dive *dive, bool show_d
*/
if (isascent && gaschange_after && dp->next && nextdp && nextdp->entered) {
if (nextdp->setpoint) {
asprintf_loc(&temp, translate("gettextFromC", "(SP = %.1fbar CCR)"), nextdp->setpoint / 1000.0);
put_format(&buf, "<td style='padding-left: 10px; color: red; float: left;'><b>%s %s</b></td>",
gasname(newgasmix), temp);
free(temp);
temp = casprintf_loc(translate("gettextFromC", "(SP = %.1fbar CCR)"), nextdp->setpoint / 1000.0);
buf += format_string_std("<td style='padding-left: 10px; color: red; float: left;'><b>%s %s</b></td>",
gasname(newgasmix), temp.c_str());
} else {
put_format(&buf, "<td style='padding-left: 10px; color: red; float: left;'><b>%s %s</b></td>",
buf += format_string_std("<td style='padding-left: 10px; color: red; float: left;'><b>%s %s</b></td>",
gasname(newgasmix), dp->divemode == UNDEF_COMP_TYPE || dp->divemode == nextdp->divemode ? "" : translate("gettextFromC", divemode_text_ui[nextdp->divemode]));
if (isascent && (get_he(lastprintgasmix) > 0)) { // For a trimix gas change on ascent, save ICD info if previous cylinder had helium
if (isobaric_counterdiffusion(lastprintgasmix, newgasmix, &icdvalues)) // Do icd calulations
icdwarning = true;
if (icdvalues.dN2 > 0) { // If the gas change involved helium as well as an increase in nitrogen..
add_icd_entry(&icdbuf, &icdvalues, icdtableheader, dp->time, depth_to_mbar(dp->depth.mm, dive), lastprintgasmix, newgasmix); // .. then print calculations to buffer.
icdbuf += icd_entry(&icdvalues, icdtableheader, dp->time, depth_to_mbar(dp->depth.mm, dive), lastprintgasmix, newgasmix); // .. then print calculations to buffer.
icdtableheader = false;
}
}
@@ -351,17 +346,16 @@ void add_plan_to_notes(struct diveplan *diveplan, struct dive *dive, bool show_d
} else if (gaschange_before || rebreatherchange_before) {
// If a new gas has been used for this segment, now is the time to show it
if (dp->setpoint) {
asprintf_loc(&temp, translate("gettextFromC", "(SP = %.1fbar CCR)"), (double) dp->setpoint / 1000.0);
put_format(&buf, "<td style='padding-left: 10px; color: red; float: left;'><b>%s %s</b></td>", gasname(gasmix), temp);
free(temp);
temp = casprintf_loc(translate("gettextFromC", "(SP = %.1fbar CCR)"), (double) dp->setpoint / 1000.0);
buf += format_string_std("<td style='padding-left: 10px; color: red; float: left;'><b>%s %s</b></td>", gasname(gasmix), temp.c_str());
} else {
put_format(&buf, "<td style='padding-left: 10px; color: red; float: left;'><b>%s %s</b></td>", gasname(gasmix),
buf += format_string_std("<td style='padding-left: 10px; color: red; float: left;'><b>%s %s</b></td>", gasname(gasmix),
lastdivemode == UNDEF_COMP_TYPE || lastdivemode == dp->divemode ? "" : translate("gettextFromC", divemode_text_ui[dp->divemode]));
if (get_he(lastprintgasmix) > 0) { // For a trimix gas change, save ICD info if previous cylinder had helium
if (isobaric_counterdiffusion(lastprintgasmix, gasmix, &icdvalues)) // Do icd calculations
icdwarning = true;
if (icdvalues.dN2 > 0) { // If the gas change involved helium as well as an increase in nitrogen..
add_icd_entry(&icdbuf, &icdvalues, icdtableheader, lasttime, depth_to_mbar(dp->depth.mm, dive), lastprintgasmix, gasmix); // .. then print data to buffer.
icdbuf += icd_entry(&icdvalues, icdtableheader, lasttime, depth_to_mbar(dp->depth.mm, dive), lastprintgasmix, gasmix); // .. then print data to buffer.
icdtableheader = false;
}
}
@@ -372,9 +366,9 @@ void add_plan_to_notes(struct diveplan *diveplan, struct dive *dive, bool show_d
lastdivemode = dp->divemode;
gaschange_after = false;
} else {
put_string(&buf, "<td> </td>");
buf += "<td> </td>";
}
put_string(&buf, "</tr>\n");
buf += "</tr>\n";
newdepth = dp->depth.mm;
// Only add the time we actually displayed so rounding errors dont accumulate
lasttime += ((dp->time - lasttime + 30) / 60) * 60;
@@ -385,19 +379,19 @@ void add_plan_to_notes(struct diveplan *diveplan, struct dive *dive, bool show_d
if (plan_verbatim) {
if (lastsetpoint >= 0) {
if (nextdp && nextdp->setpoint) {
put_format_loc(&buf, translate("gettextFromC", "Switch gas to %s (SP = %.1fbar)"), gasname(newgasmix), (double) nextdp->setpoint / 1000.0);
buf += casprintf_loc(translate("gettextFromC", "Switch gas to %s (SP = %.1fbar)"), gasname(newgasmix), (double) nextdp->setpoint / 1000.0);
} else {
put_format(&buf, translate("gettextFromC", "Switch gas to %s"), gasname(newgasmix));
buf += format_string_std(translate("gettextFromC", "Switch gas to %s"), gasname(newgasmix));
if ((isascent) && (get_he(lastprintgasmix) > 0)) { // For a trimix gas change on ascent:
if (isobaric_counterdiffusion(lastprintgasmix, newgasmix, &icdvalues)) // Do icd calculations
icdwarning = true;
if (icdvalues.dN2 > 0) { // If the gas change involved helium as well as an increase in nitrogen..
add_icd_entry(&icdbuf, &icdvalues, icdtableheader, dp->time, depth_to_mbar(dp->depth.mm, dive), lastprintgasmix, newgasmix); // ... then print data to buffer.
icdbuf += icd_entry(&icdvalues, icdtableheader, dp->time, depth_to_mbar(dp->depth.mm, dive), lastprintgasmix, newgasmix); // ... then print data to buffer.
icdtableheader = false;
}
}
}
put_string(&buf, "<br/>\n");
buf += "<br/>\n";
}
lastprintgasmix = newgasmix;
gaschange_after = false;
@@ -410,63 +404,68 @@ void add_plan_to_notes(struct diveplan *diveplan, struct dive *dive, bool show_d
lastentered = dp->entered;
} while ((dp = nextdp) != NULL);
if (!plan_verbatim)
put_string(&buf, "</tbody>\n</table>\n<br/>\n");
buf += "</tbody>\n</table>\n<br/>\n";

/* Print the CNS and OTU next.*/
dive->cns = 0;
dive->maxcns = 0;
update_cylinder_related_info(dive);
put_format_loc(&buf, "<div>\n%s: %i%%", translate("gettextFromC", "CNS"), dive->cns);
put_format_loc(&buf, "<br/>\n%s: %i<br/>\n</div>\n", translate("gettextFromC", "OTU"), dive->otu);
buf += casprintf_loc("<div>\n%s: %i%%", translate("gettextFromC", "CNS"), dive->cns);
buf += casprintf_loc("<br/>\n%s: %i<br/>\n</div>\n", translate("gettextFromC", "OTU"), dive->otu);

/* Print the settings for the diveplan next. */
put_string(&buf, "<div>\n");
buf += "<div>\n";
if (decoMode(true) == BUEHLMANN) {
put_format_loc(&buf, translate("gettextFromC", "Deco model: Bühlmann ZHL-16C with GFLow = %d%% and GFHigh = %d%%"), diveplan->gflow, diveplan->gfhigh);
buf += casprintf_loc(translate("gettextFromC", "Deco model: Bühlmann ZHL-16C with GFLow = %d%% and GFHigh = %d%%"), diveplan->gflow, diveplan->gfhigh);
} else if (decoMode(true) == VPMB) {
if (diveplan->vpmb_conservatism == 0)
put_string(&buf, translate("gettextFromC", "Deco model: VPM-B at nominal conservatism"));
buf += translate("gettextFromC", "Deco model: VPM-B at nominal conservatism");
else
put_format_loc(&buf, translate("gettextFromC", "Deco model: VPM-B at +%d conservatism"), diveplan->vpmb_conservatism);
buf += casprintf_loc(translate("gettextFromC", "Deco model: VPM-B at +%d conservatism"), diveplan->vpmb_conservatism);
if (diveplan->eff_gflow)
put_format_loc(&buf, translate("gettextFromC", ", effective GF=%d/%d"), diveplan->eff_gflow, diveplan->eff_gfhigh);
buf += casprintf_loc( translate("gettextFromC", ", effective GF=%d/%d"), diveplan->eff_gflow, diveplan->eff_gfhigh);
} else if (decoMode(true) == RECREATIONAL) {
put_format_loc(&buf, translate("gettextFromC", "Deco model: Recreational mode based on Bühlmann ZHL-16B with GFLow = %d%% and GFHigh = %d%%"),
buf += casprintf_loc(translate("gettextFromC", "Deco model: Recreational mode based on Bühlmann ZHL-16B with GFLow = %d%% and GFHigh = %d%%"),
diveplan->gflow, diveplan->gfhigh);
}
put_string(&buf, "<br/>\n");
buf += "<br/>\n";

const char *depth_unit;
int altitude = (int) get_depth_units((int) (pressure_to_altitude(diveplan->surface_pressure)), NULL, &depth_unit);
{
const char *depth_unit;
int altitude = (int) get_depth_units((int) (pressure_to_altitude(diveplan->surface_pressure)), NULL, &depth_unit);

put_format_loc(&buf, translate("gettextFromC", "ATM pressure: %dmbar (%d%s)<br/>\n</div>\n"), diveplan->surface_pressure, altitude, depth_unit);
buf += casprintf_loc(translate("gettextFromC", "ATM pressure: %dmbar (%d%s)<br/>\n</div>\n"), diveplan->surface_pressure, altitude, depth_unit);
}

/* Get SAC values and units for printing it in gas consumption */
double bottomsacvalue, decosacvalue;
int sacdecimals;
const char* sacunit;
{
double bottomsacvalue, decosacvalue;
int sacdecimals;
const char* sacunit;

bottomsacvalue = get_volume_units(prefs.bottomsac, &sacdecimals, &sacunit);
decosacvalue = get_volume_units(prefs.decosac, NULL, NULL);
bottomsacvalue = get_volume_units(prefs.bottomsac, &sacdecimals, &sacunit);
decosacvalue = get_volume_units(prefs.decosac, NULL, NULL);

/* Reduce number of decimals from 1 to 0 for bar/min, keep 2 for cuft/min */
if (sacdecimals==1) sacdecimals--;
/* Reduce number of decimals from 1 to 0 for bar/min, keep 2 for cuft/min */
if (sacdecimals==1) sacdecimals--;

/* Print the gas consumption next.*/
if (dive->dc.divemode == CCR)
temp = strdup(translate("gettextFromC", "Gas consumption (CCR legs excluded):"));
else
asprintf_loc(&temp, "%s %.*f|%.*f%s/min):", translate("gettextFromC", "Gas consumption (based on SAC"),
sacdecimals, bottomsacvalue, sacdecimals, decosacvalue, sacunit);
put_format(&buf, "<div>\n%s<br/>\n", temp);
free(temp);
/* Print the gas consumption next.*/
std::string temp;
if (dive->dc.divemode == CCR)
temp = translate("gettextFromC", "Gas consumption (CCR legs excluded):");
else
temp = casprintf_loc("%s %.*f|%.*f%s/min):", translate("gettextFromC", "Gas consumption (based on SAC"),
sacdecimals, bottomsacvalue, sacdecimals, decosacvalue, sacunit);
buf += format_string_std("<div>\n%s<br/>\n", temp.c_str());
}

/* Print gas consumption: This loop covers all cylinders */
for (int gasidx = 0; gasidx < dive->cylinders.nr; gasidx++) {
double volume, pressure, deco_volume, deco_pressure, mingas_volume, mingas_pressure, mingas_d_pressure, mingas_depth;
const char *unit, *pressure_unit, *depth_unit;
char warning[1000] = "";
char mingas[1000] = "";
std::string temp;
std::string warning;
std::string mingas;
cylinder_t *cyl = get_cylinder(dive, gasidx);
if (cyl->cylinder_use == NOT_USED)
continue;
@@ -483,13 +482,13 @@ void add_plan_to_notes(struct diveplan *diveplan, struct dive *dive, bool show_d
* This only works if we have working pressure for the cylinder
* 10bar is a made up number - but it seemed silly to pretend you could breathe cylinder down to 0 */
if (cyl->end.mbar < 10000)
snprintf(warning, sizeof(warning), "<br/>\n — <span style='color: red;'>%s </span> %s",
warning = format_string_std("<br/>\n — <span style='color: red;'>%s </span> %s",
translate("gettextFromC", "Warning:"),
translate("gettextFromC", "this is more gas than available in the specified cylinder!"));
else
if (cyl->end.mbar / 1000.0 * cyl->type.size.mliter / gas_compressibility_factor(cyl->gasmix, cyl->end.mbar / 1000.0)
< cyl->deco_gas_used.mliter)
snprintf(warning, sizeof(warning), "<br/>\n — <span style='color: red;'>%s </span> %s",
warning = format_string_std("<br/>\n — <span style='color: red;'>%s </span> %s",
translate("gettextFromC", "Warning:"),
translate("gettextFromC", "not enough reserve for gas sharing on ascent!"));

@@ -513,7 +512,7 @@ void add_plan_to_notes(struct diveplan *diveplan, struct dive *dive, bool show_d
mingas_depth = get_depth_units(lastbottomdp->depth.mm, NULL, &depth_unit);
/* Print it to results */
if (cyl->start.mbar > lastbottomdp->minimum_gas.mbar) {
snprintf_loc(mingas, sizeof(mingas), "<br/>\n — <span style='color: %s;'>%s</span> (%s %.1fx%s/+%d%s@%.0f%s): "
mingas = casprintf_loc("<br/>\n — <span style='color: %s;'>%s</span> (%s %.1fx%s/+%d%s@%.0f%s): "
"%.0f%s/%.0f%s<span style='color: %s;'>/Δ:%+.0f%s</span>",
mingas_d_pressure > 0 ? "green" :"red",
translate("gettextFromC", "Minimum gas"),
@@ -528,98 +527,96 @@ void add_plan_to_notes(struct diveplan *diveplan, struct dive *dive, bool show_d
mingas_d_pressure > 0 ? "grey" :"indianred",
mingas_d_pressure, pressure_unit);
} else {
snprintf(warning, sizeof(warning), "<br/>\n — <span style='color: red;'>%s </span> %s",
warning = format_string_std("<br/>\n — <span style='color: red;'>%s </span> %s",
translate("gettextFromC", "Warning:"),
translate("gettextFromC", "required minimum gas for ascent already exceeding start pressure of cylinder!"));
}
}
/* Print the gas consumption for every cylinder here to temp buffer. */
if (lrint(volume) > 0) {
asprintf_loc(&temp, translate("gettextFromC", "%.0f%s/%.0f%s of <span style='color: red;'><b>%s</b></span> (%.0f%s/%.0f%s in planned ascent)"),
temp = casprintf_loc(translate("gettextFromC", "%.0f%s/%.0f%s of <span style='color: red;'><b>%s</b></span> (%.0f%s/%.0f%s in planned ascent)"),
volume, unit, pressure, pressure_unit, gasname(cyl->gasmix), deco_volume, unit, deco_pressure, pressure_unit);
} else {
asprintf_loc(&temp, translate("gettextFromC", "%.0f%s/%.0f%s of <span style='color: red;'><b>%s</b></span>"),
temp = casprintf_loc(translate("gettextFromC", "%.0f%s/%.0f%s of <span style='color: red;'><b>%s</b></span>"),
volume, unit, pressure, pressure_unit, gasname(cyl->gasmix));
}
} else {
if (lrint(volume) > 0) {
asprintf_loc(&temp, translate("gettextFromC", "%.0f%s of <span style='color: red;'><b>%s</b></span> (%.0f%s during planned ascent)"),
temp = casprintf_loc(translate("gettextFromC", "%.0f%s of <span style='color: red;'><b>%s</b></span> (%.0f%s during planned ascent)"),
volume, unit, gasname(cyl->gasmix), deco_volume, unit);
} else {
asprintf_loc(&temp, translate("gettextFromC", "%.0f%s of <span style='color: red;'><b>%s</b></span>"),
temp = casprintf_loc(translate("gettextFromC", "%.0f%s of <span style='color: red;'><b>%s</b></span>"),
volume, unit, gasname(cyl->gasmix));
}
}
/* Gas consumption: Now finally print all strings to output */
put_format(&buf, "%s%s%s<br/>\n", temp, warning, mingas);
free(temp);
buf += format_string_std("%s%s%s<br/>\n", temp.c_str(), warning.c_str(), mingas.c_str());
}
put_format(&buf, "</div>\n");
buf += "</div>\n";

/* For trimix OC dives, if an icd table header and icd data were printed to buffer, then add the ICD table here */
if (!icdtableheader && prefs.show_icd) {
put_string(&icdbuf, "</tbody></table>\n"); // End the ICD table
mb_cstring(&icdbuf);
put_string(&buf, icdbuf.buffer); // ..and add it to the html buffer
icdbuf += "</tbody></table>\n"; // End the ICD table
buf += icdbuf;
if (icdwarning) { // If necessary, add warning
put_format(&buf, "<span style='color: red;'>%s</span> %s",
buf += format_string_std("<span style='color: red;'>%s</span> %s",
translate("gettextFromC", "Warning:"),
translate("gettextFromC", "Isobaric counterdiffusion conditions exceeded"));
}
put_string(&buf, "<br/></div>\n");
buf += "<br/></div>\n";
}
free_buffer(&icdbuf);

/* Print warnings for pO2 */
dp = diveplan->dp;
bool o2warning_exist = false;
enum divemode_t current_divemode;
double amb;
const struct event *evd = NULL;
current_divemode = UNDEF_COMP_TYPE;
/* Print warnings for pO2 (move into separate function?) */
{
dp = diveplan->dp;
bool o2warning_exist = false;
enum divemode_t current_divemode;
double amb;
const struct event *evd = NULL;
current_divemode = UNDEF_COMP_TYPE;

if (dive->dc.divemode != CCR) {
while (dp) {
if (dp->time != 0) {
struct gas_pressures pressures;
struct gasmix gasmix = get_cylinder(dive, dp->cylinderid)->gasmix;
if (dive->dc.divemode != CCR) {
while (dp) {
if (dp->time != 0) {
std::string temp;
struct gas_pressures pressures;
struct gasmix gasmix = get_cylinder(dive, dp->cylinderid)->gasmix;

current_divemode = get_current_divemode(&dive->dc, dp->time, &evd, &current_divemode);
amb = depth_to_atm(dp->depth.mm, dive);
fill_pressures(&pressures, amb, gasmix, (current_divemode == OC) ? 0.0 : amb * gasmix.o2.permille / 1000.0, current_divemode);
current_divemode = get_current_divemode(&dive->dc, dp->time, &evd, &current_divemode);
amb = depth_to_atm(dp->depth.mm, dive);
fill_pressures(&pressures, amb, gasmix, (current_divemode == OC) ? 0.0 : amb * gasmix.o2.permille / 1000.0, current_divemode);

if (pressures.o2 > (dp->entered ? prefs.bottompo2 : prefs.decopo2) / 1000.0) {
const char *depth_unit;
int decimals;
double depth_value = get_depth_units(dp->depth.mm, &decimals, &depth_unit);
if (!o2warning_exist)
put_string(&buf, "<div>\n");
o2warning_exist = true;
asprintf_loc(&temp, translate("gettextFromC", "high pO₂ value %.2f at %d:%02u with gas %s at depth %.*f %s"),
pressures.o2, FRACTION(dp->time, 60), gasname(gasmix), decimals, depth_value, depth_unit);
put_format(&buf, "<span style='color: red;'>%s </span> %s<br/>\n", translate("gettextFromC", "Warning:"), temp);
free(temp);
} else if (pressures.o2 < 0.16) {
const char *depth_unit;
int decimals;
double depth_value = get_depth_units(dp->depth.mm, &decimals, &depth_unit);
if (!o2warning_exist)
put_string(&buf, "<div>");
o2warning_exist = true;
asprintf_loc(&temp, translate("gettextFromC", "low pO₂ value %.2f at %d:%02u with gas %s at depth %.*f %s"),
pressures.o2, FRACTION(dp->time, 60), gasname(gasmix), decimals, depth_value, depth_unit);
put_format(&buf, "<span style='color: red;'>%s </span> %s<br/>\n", translate("gettextFromC", "Warning:"), temp);
free(temp);
if (pressures.o2 > (dp->entered ? prefs.bottompo2 : prefs.decopo2) / 1000.0) {
const char *depth_unit;
int decimals;
double depth_value = get_depth_units(dp->depth.mm, &decimals, &depth_unit);
if (!o2warning_exist)
buf += "<div>\n";
o2warning_exist = true;
temp = casprintf_loc(translate("gettextFromC", "high pO₂ value %.2f at %d:%02u with gas %s at depth %.*f %s"),
pressures.o2, FRACTION_TUPLE(dp->time, 60), gasname(gasmix), decimals, depth_value, depth_unit);
buf += format_string_std("<span style='color: red;'>%s </span> %s<br/>\n", translate("gettextFromC", "Warning:"), temp.c_str());
} else if (pressures.o2 < 0.16) {
const char *depth_unit;
int decimals;
double depth_value = get_depth_units(dp->depth.mm, &decimals, &depth_unit);
if (!o2warning_exist)
buf += "<div>";
o2warning_exist = true;
temp = casprintf_loc(translate("gettextFromC", "low pO₂ value %.2f at %d:%02u with gas %s at depth %.*f %s"),
pressures.o2, FRACTION_TUPLE(dp->time, 60), gasname(gasmix), decimals, depth_value, depth_unit);
buf += format_string_std("<span style='color: red;'>%s </span> %s<br/>\n", translate("gettextFromC", "Warning:"), temp.c_str());
}
}
dp = dp->next;
}
dp = dp->next;
}
if (o2warning_exist)
buf += "</div>\n";
}
if (o2warning_exist)
put_string(&buf, "</div>\n");
finished:
// TODO: avoid copy
free(dive->notes);
dive->notes = detach_cstring(&buf);
dive->notes = strdup(buf.c_str());
#ifdef DEBUG_PLANNER_NOTES
printf("<!DOCTYPE html>\n<html>\n\t<head><title>plannernotes</title><meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"/></head>\n\t<body>\n%s\t</body>\n</html>\n", dive->notes);
#endif
Some files were not shown because too many files have changed in this diff.