Merge branch 'develop' into cleanup

This commit is contained in:
gavin 2022-07-22 15:10:34 +05:30 committed by GitHub
commit 7eb7ec2663
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
197 changed files with 4635 additions and 2836 deletions

View file

@ -9,6 +9,6 @@ trim_trailing_whitespace = true
charset = utf-8
# python, js indentation settings
[{*.py,*.js,*.vue}]
[{*.py,*.js,*.vue,*.css,*.scss,*.html}]
indent_style = tab
indent_size = 4

View file

@ -1,9 +1,9 @@
#!/bin/bash
set -e
cd ~ || exit
echo "Setting Up Bench..."
pip install frappe-bench
bench -v init frappe-bench --skip-assets --python "$(which python)" --frappe-path "${GITHUB_WORKSPACE}"
@ -17,10 +17,6 @@ if [ "$TYPE" == "server" ]; then
fi
if [ "$DB" == "mariadb" ];then
curl -LsS -O https://downloads.mariadb.com/MariaDB/mariadb_repo_setup
sudo bash mariadb_repo_setup --mariadb-server-version=10.6
sudo apt install mariadb-client
mariadb --host 127.0.0.1 --port 3306 -u root -ptravis -e "SET GLOBAL character_set_server = 'utf8mb4'";
mariadb --host 127.0.0.1 --port 3306 -u root -ptravis -e "SET GLOBAL collation_server = 'utf8mb4_unicode_ci'";
@ -56,12 +52,8 @@ bench -v setup requirements --dev
if [ "$TYPE" == "ui" ]; then sed -i 's/^web: bench serve/web: bench serve --with-coverage/g' Procfile; fi
# install node-sass which is required for website theme test
cd ./apps/frappe || exit
yarn add node-sass@4.13.1
cd ../..
bench start &
bench start &> bench_start.log &
bench --site test_site reinstall --yes
if [ "$TYPE" == "server" ]; then bench --site test_site_producer reinstall --yes; fi
if [ "$TYPE" == "server" ]; then CI=Yes bench build --app frappe; fi

View file

@ -1,19 +1,13 @@
#!/bin/bash
set -e
# Check for merge conflicts before proceeding
python -m compileall -f "${GITHUB_WORKSPACE}"
if grep -lr --exclude-dir=node_modules "^<<<<<<< " "${GITHUB_WORKSPACE}"
then echo "Found merge conflicts"
exit 1
fi
echo "Setting Up System Dependencies..."
# install wkhtmltopdf
wget -O /tmp/wkhtmltox.tar.xz https://github.com/frappe/wkhtmltopdf/raw/master/wkhtmltox-0.12.3_linux-generic-amd64.tar.xz
tar -xf /tmp/wkhtmltox.tar.xz -C /tmp
sudo mv /tmp/wkhtmltox/bin/wkhtmltopdf /usr/local/bin/wkhtmltopdf
sudo chmod o+x /usr/local/bin/wkhtmltopdf
wget https://github.com/wkhtmltopdf/packaging/releases/download/0.12.6-1/wkhtmltox_0.12.6-1.focal_amd64.deb
sudo apt install ./wkhtmltox_0.12.6-1.focal_amd64.deb
# install cups
sudo apt update && sudo apt install libcups2-dev redis-server
curl -LsS -O https://downloads.mariadb.com/MariaDB/mariadb_repo_setup
sudo bash mariadb_repo_setup --mariadb-server-version=10.6
sudo apt update
sudo apt install libcups2-dev redis-server libmariadb3 libmariadb-dev mariadb-client

View file

@ -46,7 +46,7 @@ def is_ci(file):
return ".github" in file
def is_frontend_code(file):
return file.lower().endswith((".css", ".scss", ".less", ".sass", ".styl", ".js", ".ts", ".vue"))
return file.lower().endswith((".css", ".scss", ".less", ".sass", ".styl", ".js", ".ts", ".vue", ".html"))
def is_docs(file):
regex = re.compile(r'\.(md|png|jpg|jpeg|csv|svg)$|^.github|LICENSE')

30
.github/semantic.yml vendored
View file

@ -1,30 +0,0 @@
# Always validate the PR title AND all the commits
titleAndCommits: true
# Allow use of Merge commits (eg on github: "Merge branch 'master' into feature/ride-unicorns")
# this is only relevant when using commitsOnly: true (or titleAndCommits: true)
allowMergeCommits: true
# Allow use of Revert commits (eg on github: "Revert "feat: ride unicorns"")
# this is only relevant when using commitsOnly: true (or titleAndCommits: true)
allowRevertCommits: true
# For allowed PR types: https://github.com/commitizen/conventional-commit-types/blob/v3.0.0/index.json
# Tool Reference: https://github.com/zeke/semantic-pull-requests
# By default types specified in commitizen/conventional-commit-types is used.
# See: https://github.com/commitizen/conventional-commit-types/blob/v3.0.0/index.json
# You can override the valid types
types:
- BREAKING CHANGE
- feat
- fix
- docs
- style
- refactor
- perf
- test
- build
- ci
- chore
- revert

View file

@ -30,16 +30,13 @@ jobs:
- name: Clone
uses: actions/checkout@v3
- name: Setup Python
uses: "gabrielfalcao/pyenv-action@v10"
with:
versions: 3.10:latest, 3.7:latest
- name: Setup Node
uses: actions/setup-node@v3
with:
node-version: 14
check-latest: true
- name: Check for valid Python & Merge Conflicts
run: |
python -m compileall -f "${GITHUB_WORKSPACE}"
if grep -lr --exclude-dir=node_modules "^<<<<<<< " "${GITHUB_WORKSPACE}"
then echo "Found merge conflicts"
exit 1
fi
- name: Check if build should be run
id: check-build
@ -50,6 +47,19 @@ jobs:
PR_NUMBER: ${{ github.event.number }}
REPO_NAME: ${{ github.repository }}
- name: Setup Python
if: ${{ steps.check-build.outputs.build == 'strawberry' }}
uses: "gabrielfalcao/pyenv-action@v10"
with:
versions: 3.10:latest, 3.7:latest
- name: Setup Node
if: ${{ steps.check-build.outputs.build == 'strawberry' }}
uses: actions/setup-node@v3
with:
node-version: 16
check-latest: true
- name: Add to Hosts
if: ${{ steps.check-build.outputs.build == 'strawberry' }}
run: echo "127.0.0.1 test_site" | sudo tee -a /etc/hosts
@ -92,22 +102,17 @@ jobs:
${{ runner.os }}-yarn-
- name: Install Dependencies
if: ${{ steps.check-build.outputs.build == 'strawberry' }}
run: bash ${GITHUB_WORKSPACE}/.github/helper/install_dependencies.sh
env:
BEFORE: ${{ env.GITHUB_EVENT_PATH.before }}
AFTER: ${{ env.GITHUB_EVENT_PATH.after }}
TYPE: server
- name: Install
if: ${{ steps.check-build.outputs.build == 'strawberry' }}
run: |
bash ${GITHUB_WORKSPACE}/.github/helper/install_dependencies.sh
pip install frappe-bench
pyenv global $(pyenv versions | grep '3.10')
bash ${GITHUB_WORKSPACE}/.github/helper/install.sh
env:
DB: mariadb
BEFORE: ${{ env.GITHUB_EVENT_PATH.before }}
AFTER: ${{ env.GITHUB_EVENT_PATH.after }}
TYPE: server
DB: mariadb
- name: Run Patch Tests
if: ${{ steps.check-build.outputs.build == 'strawberry' }}

View file

@ -15,7 +15,7 @@ jobs:
path: 'frappe'
- uses: actions/setup-node@v3
with:
node-version: 14
node-version: 16
- uses: actions/setup-python@v4
with:
python-version: '3.10'

View file

@ -16,9 +16,11 @@ jobs:
- uses: actions/checkout@v3
with:
path: 'frappe'
- uses: actions/setup-node@v3
with:
python-version: '12.x'
node-version: 16
- uses: actions/setup-python@v4
with:
python-version: '3.10'

View file

@ -16,10 +16,10 @@ jobs:
with:
fetch-depth: 0
persist-credentials: false
- name: Setup Node.js v14
- name: Setup Node.js
uses: actions/setup-node@v3
with:
node-version: 14
node-version: 16
- name: Setup dependencies
run: |
npm install @semantic-release/git @semantic-release/exec --no-save
@ -31,4 +31,4 @@ jobs:
GIT_AUTHOR_EMAIL: "developers@frappe.io"
GIT_COMMITTER_NAME: "Frappe PR Bot"
GIT_COMMITTER_EMAIL: "developers@frappe.io"
run: npx semantic-release
run: npx semantic-release

30
.github/workflows/semantic-commits.yml vendored Normal file
View file

@ -0,0 +1,30 @@
name: Semantic Commits
on:
pull_request: {}
permissions:
contents: read
concurrency:
group: commitcheck-frappe-${{ github.event.number }}
cancel-in-progress: true
jobs:
commitlint:
name: Check Commit Titles
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 200
- uses: actions/setup-node@v3
with:
node-version: 16
check-latest: true
- name: Check commit titles
run: |
npm install @commitlint/cli @commitlint/config-conventional
npx commitlint --verbose --from ${{ github.event.pull_request.base.sha }} --to ${{ github.event.pull_request.head.sha }}

View file

@ -44,6 +44,14 @@ jobs:
with:
python-version: '3.10'
- name: Check for valid Python & Merge Conflicts
run: |
python -m compileall -f "${GITHUB_WORKSPACE}"
if grep -lr --exclude-dir=node_modules "^<<<<<<< " "${GITHUB_WORKSPACE}"
then echo "Found merge conflicts"
exit 1
fi
- name: Check if build should be run
id: check-build
run: |
@ -56,7 +64,7 @@ jobs:
- uses: actions/setup-node@v3
if: ${{ steps.check-build.outputs.build == 'strawberry' }}
with:
node-version: 14
node-version: 16
check-latest: true
- name: Add to Hosts
@ -104,18 +112,14 @@ jobs:
- name: Install Dependencies
if: ${{ steps.check-build.outputs.build == 'strawberry' }}
run: bash ${GITHUB_WORKSPACE}/.github/helper/install_dependencies.sh
run: |
bash ${GITHUB_WORKSPACE}/.github/helper/install_dependencies.sh
bash ${GITHUB_WORKSPACE}/.github/helper/install.sh
env:
BEFORE: ${{ env.GITHUB_EVENT_PATH.before }}
AFTER: ${{ env.GITHUB_EVENT_PATH.after }}
TYPE: server
- name: Install
if: ${{ steps.check-build.outputs.build == 'strawberry' }}
run: bash ${GITHUB_WORKSPACE}/.github/helper/install.sh
env:
DB: mariadb
TYPE: server
- name: Run Tests
if: ${{ steps.check-build.outputs.build == 'strawberry' }}

View file

@ -47,6 +47,14 @@ jobs:
with:
python-version: '3.10'
- name: Check for valid Python & Merge Conflicts
run: |
python -m compileall -f "${GITHUB_WORKSPACE}"
if grep -lr --exclude-dir=node_modules "^<<<<<<< " "${GITHUB_WORKSPACE}"
then echo "Found merge conflicts"
exit 1
fi
- name: Check if build should be run
id: check-build
run: |
@ -59,7 +67,7 @@ jobs:
- uses: actions/setup-node@v3
if: ${{ steps.check-build.outputs.build == 'strawberry' }}
with:
node-version: '14'
node-version: '16'
check-latest: true
- name: Add to Hosts
@ -107,18 +115,14 @@ jobs:
- name: Install Dependencies
if: ${{ steps.check-build.outputs.build == 'strawberry' }}
run: bash ${GITHUB_WORKSPACE}/.github/helper/install_dependencies.sh
run: |
bash ${GITHUB_WORKSPACE}/.github/helper/install_dependencies.sh
bash ${GITHUB_WORKSPACE}/.github/helper/install.sh
env:
BEFORE: ${{ env.GITHUB_EVENT_PATH.before }}
AFTER: ${{ env.GITHUB_EVENT_PATH.after }}
TYPE: server
- name: Install
if: ${{ steps.check-build.outputs.build == 'strawberry' }}
run: bash ${GITHUB_WORKSPACE}/.github/helper/install.sh
env:
DB: postgres
TYPE: server
- name: Run Tests
if: ${{ steps.check-build.outputs.build == 'strawberry' }}

View file

@ -43,6 +43,14 @@ jobs:
with:
python-version: '3.10'
- name: Check for valid Python & Merge Conflicts
run: |
python -m compileall -f "${GITHUB_WORKSPACE}"
if grep -lr --exclude-dir=node_modules "^<<<<<<< " "${GITHUB_WORKSPACE}"
then echo "Found merge conflicts"
exit 1
fi
- name: Check if build should be run
id: check-build
run: |
@ -55,7 +63,7 @@ jobs:
- uses: actions/setup-node@v3
if: ${{ steps.check-build.outputs.build == 'strawberry' }}
with:
node-version: 14
node-version: 16
check-latest: true
- name: Add to Hosts
@ -113,18 +121,14 @@ jobs:
- name: Install Dependencies
if: ${{ steps.check-build.outputs.build == 'strawberry' }}
run: bash ${GITHUB_WORKSPACE}/.github/helper/install_dependencies.sh
run: |
bash ${GITHUB_WORKSPACE}/.github/helper/install_dependencies.sh
bash ${GITHUB_WORKSPACE}/.github/helper/install.sh
env:
BEFORE: ${{ env.GITHUB_EVENT_PATH.before }}
AFTER: ${{ env.GITHUB_EVENT_PATH.after }}
TYPE: ui
- name: Install
if: ${{ steps.check-build.outputs.build == 'strawberry' }}
run: bash ${GITHUB_WORKSPACE}/.github/helper/install.sh
env:
DB: mariadb
TYPE: ui
- name: Instrument Source Code
if: ${{ steps.check-build.outputs.build == 'strawberry' }}
@ -175,3 +179,7 @@ jobs:
files: /home/runner/frappe-bench/sites/coverage.xml
verbose: true
flags: server
- name: Show bench console if tests failed
if: ${{ failure() }}
run: cat ~/frappe-bench/bench_start.log

View file

@ -21,6 +21,7 @@ pull_request_rules:
- name: Automatic merge on CI success and review
conditions:
- status-success=Sider
- status-success=Check Commit Titles
- status-success=Python Unit Tests (MariaDB) (1)
- status-success=Python Unit Tests (MariaDB) (2)
- status-success=Python Unit Tests (Postgres) (1)

25
commitlint.config.js Normal file
View file

@ -0,0 +1,25 @@
module.exports = {
parserPreset: 'conventional-changelog-conventionalcommits',
rules: {
'subject-empty': [2, 'never'],
'type-case': [2, 'always', 'lower-case'],
'type-empty': [2, 'never'],
'type-enum': [
2,
'always',
[
'build',
'chore',
'ci',
'docs',
'feat',
'fix',
'perf',
'refactor',
'revert',
'style',
'test',
],
],
},
};

View file

@ -6,6 +6,7 @@ context('Form', () => {
return frappe.call("frappe.tests.ui_test_helpers.create_contact_records");
});
});
it('create a new form', () => {
cy.visit('/app/todo/new');
cy.get_field('description', 'Text Editor').type('this is a test todo', {force: true}).wait(200);
@ -17,7 +18,8 @@ context('Form', () => {
cy.get('.primary-action').click();
cy.wait('@form_save').its('response.statusCode').should('eq', 200);
cy.visit('/app/todo');
cy.go_to_list('ToDo');
cy.clear_filters()
cy.get('.page-head').findByTitle('To Do').should('exist');
cy.get('.list-row').should('contain', 'this is a test todo');
});
@ -94,4 +96,63 @@ context('Form', () => {
})
})
});
it('let user undo/redo field value changes', { scrollBehavior: false }, () => {
const jump_to_field = (field_label) => {
cy.get("body")
.type("{esc}") // lose focus if any
.type("{ctrl+j}") // jump to field
.type(field_label)
.wait(500)
.type("{enter}")
.wait(200)
.type("{enter}")
.wait(500);
};
const type_value = (value) => {
cy.focused()
.clear()
.type(value)
.type("{esc}");
};
const undo = () => cy.get("body").type("{esc}").type("{ctrl+z}").wait(500);
const redo = () => cy.get("body").type("{esc}").type("{ctrl+y}").wait(500);
cy.new_form('User');
jump_to_field("Email");
type_value("admin@example.com");
jump_to_field("Username");
type_value("admin42");
jump_to_field("Birth Date");
type_value("12-31-01");
jump_to_field("Send Welcome Email");
cy.focused().uncheck()
// make a mistake
jump_to_field("Username");
type_value("admin24");
// undo behaviour
undo();
cy.get_field("username").should('have.value', 'admin42');
// redo behaviour
redo();
cy.get_field("username").should('have.value', 'admin24');
// undo everything & redo everything, ensure same values at the end
undo(); undo(); undo(); undo(); undo();
redo(); redo(); redo(); redo(); redo();
cy.get_field("username").should('have.value', 'admin24');
cy.get_field("email").should('have.value', 'admin@example.com');
cy.get_field("birth_date").should('have.value', '12-31-2001'); // parsed value
cy.get_field("send_welcome_email").should('not.be.checked');
});
});

View file

@ -9,6 +9,7 @@ context('List Paging', () => {
it('test load more with count selection buttons', () => {
cy.visit('/app/todo/view/report');
cy.clear_filters()
cy.get('.list-paging-area .list-count').should('contain.text', '20 of');
cy.get('.list-paging-area .btn-more').click();

View file

@ -9,6 +9,7 @@ context('List View', () => {
it('Keep checkbox checked after Refresh', () => {
cy.go_to_list('ToDo');
cy.clear_filters()
cy.get('.list-row-container .list-row-checkbox').click({ multiple: true, force: true });
cy.get('.actions-btn-group button').contains('Actions').should('be.visible');
cy.intercept('/api/method/frappe.desk.reportview.get').as('list-refresh');
@ -21,6 +22,7 @@ context('List View', () => {
it('enables "Actions" button', () => {
const actions = ['Approve', 'Reject', 'Edit', 'Export', 'Assign To', 'Apply Assignment Rule', 'Add Tags', 'Print', 'Delete'];
cy.go_to_list('ToDo');
cy.clear_filters()
cy.get('.list-row-container:contains("Pending") .list-row-checkbox').click({ multiple: true, force: true });
cy.get('.actions-btn-group button').contains('Actions').should('be.visible').click();
cy.get('.dropdown-menu li:visible .dropdown-item').should('have.length', 9).each((el, index) => {

View file

@ -1,4 +1,4 @@
context('Recorder', () => {
context.skip('Recorder', () => {
before(() => {
cy.login();
});

View file

@ -12,7 +12,8 @@ context('Timeline', () => {
cy.get('[data-fieldname="description"] .ql-editor.ql-blank').type('Test ToDo', {force: true}).wait(200);
cy.get('.page-head .page-actions').findByRole('button', {name: 'Save'}).click();
cy.visit('/app/todo');
cy.go_to_list('ToDo');
cy.clear_filters()
cy.click_listview_row_item(0);
//To check if the comment box is initially empty and tying some text into it
@ -79,4 +80,4 @@ context('Timeline', () => {
cy.get('.page-actions .actions-btn-group [data-label="Delete"]').click();
cy.click_modal_primary_button('Yes');
});
});
});

View file

@ -3,24 +3,253 @@ context('Web Form', () => {
cy.login();
});
it('Create Web Form', () => {
cy.visit('/app/web-form/new');
cy.intercept('POST', '/api/method/frappe.desk.form.save.savedocs').as('save_form');
cy.fill_field('title', 'Note');
cy.fill_field('doc_type', 'Note', 'Link');
cy.fill_field('module', 'Website', 'Link');
cy.click_custom_action_button('Get Fields');
cy.click_custom_action_button('Publish');
cy.wait('@save_form');
cy.get_field('route').should('have.value', 'note');
cy.get('.title-area .indicator-pill').contains('Published');
});
it('Open Web Form (Logged in User)', () => {
cy.visit('/note');
cy.fill_field('title', 'Note 1');
cy.get('.web-form-actions button').contains('Save').click();
cy.url().should('include', '/note/Note%201');
cy.visit('/note');
cy.url().should('include', '/note/Note%201');
});
it('Open Web Form (Guest)', () => {
cy.request('/api/method/logout');
cy.visit('/note');
cy.url().should('include', '/note/new');
cy.fill_field('title', 'Guest Note 1');
cy.get('.web-form-actions button').contains('Save').click();
cy.url().should('include', '/note/new');
cy.visit('/note');
cy.url().should('include', '/note/new');
});
it('Login Required', () => {
cy.login();
cy.visit('/app/web-form/note');
cy.findByRole("tab", {name: "Form Settings"}).click();
cy.get('input[data-fieldname="login_required"]').check({force: true});
cy.save();
cy.visit('/note');
cy.url().should('include', '/note/Note%201');
cy.call('logout');
cy.visit('/note');
cy.get_open_dialog()
.get('.modal-message')
.contains('You are not permitted to access this page without login.');
});
it('Show List', () => {
cy.login();
cy.visit('/app/web-form/note');
cy.findByRole("tab", {name: "List Settings"}).click();
cy.get('input[data-fieldname="show_list"]').check();
cy.save();
cy.visit('/note');
cy.url().should('include', '/note/list');
cy.get('.web-list-table').should('be.visible');
});
it('Show Custom List Title', () => {
cy.visit('/app/web-form/note');
cy.findByRole("tab", {name: "List Settings"}).click();
cy.fill_field('list_title', 'Note List');
cy.save();
cy.visit('/note');
cy.url().should('include', '/note/list');
cy.get('.web-list-header h1').should('contain.text', 'Note List');
});
it('Show Custom List Columns', () => {
cy.visit('/note');
cy.url().should('include', '/note/list');
cy.get('.web-list-table thead th').contains('Name');
cy.get('.web-list-table thead th').contains('Title');
cy.visit('/app/web-form/note');
cy.findByRole("tab", {name: "List Settings"}).click();
cy.get('[data-fieldname="list_columns"] .grid-footer button').contains('Add Row').as('add-row');
cy.get('@add-row').click();
cy.get('[data-fieldname="list_columns"] .grid-body .rows').as('grid-rows');
cy.get('@grid-rows').find('.grid-row:first [data-fieldname="fieldname"]').click();
cy.get('@grid-rows').find('.grid-row:first select[data-fieldname="fieldname"]').select('Title (Data)');
cy.get('@add-row').click();
cy.get('@grid-rows').find('.grid-row[data-idx="2"] [data-fieldname="fieldname"]').click();
cy.get('@grid-rows').find('.grid-row[data-idx="2"] select[data-fieldname="fieldname"]').select('Public (Check)');
cy.get('@add-row').click();
cy.get('@grid-rows').find('.grid-row:last [data-fieldname="fieldname"]').click();
cy.get('@grid-rows').find('.grid-row:last select[data-fieldname="fieldname"]').select('Content (Text Editor)');
cy.save();
cy.visit('/note');
cy.url().should('include', '/note/list');
cy.get('.web-list-table thead th').contains('Title');
cy.get('.web-list-table thead th').contains('Public');
cy.get('.web-list-table thead th').contains('Content');
});
it('Breadcrumbs', () => {
cy.visit('/note/Note 1');
cy.get('.breadcrumb-container .breadcrumb .breadcrumb-item:first a')
.should('contain.text', 'Note').click();
cy.url().should('include', '/note/list');
});
it('Custom Breadcrumbs', () => {
cy.visit('/app/web-form/note');
cy.findByRole("tab", {name: "Form Settings"}).click();
cy.get('.form-section .section-head').contains('Customization').click();
cy.fill_field('breadcrumbs', '[{"label": _("Notes"), "route":"note"}]', 'Code');
cy.get('.form-section .section-head').contains('Customization').click();
cy.save();
cy.visit('/note/Note 1');
cy.get('.breadcrumb-container .breadcrumb .breadcrumb-item:first a')
.should('contain.text', 'Notes');
});
it('Read Only', () => {
cy.login();
cy.visit('/note');
cy.url().should('include', '/note/list');
// Read Only Field
cy.get('.web-list-table tbody tr[id="Note 1"]').click();
cy.get('.frappe-control[data-fieldname="title"] .control-input')
.should('have.css', 'display', 'none');
});
it('Edit Mode', () => {
cy.visit('/app/web-form/note');
cy.findByRole("tab", {name: "Form Settings"}).click();
cy.get('input[data-fieldname="allow_edit"]').check();
cy.save();
cy.visit('/note/Note 1');
cy.url().should('include', '/note/Note%201');
cy.get('.web-form-actions a').contains('Edit').click();
cy.url().should('include', '/note/Note%201/edit');
// Editable Field
cy.get_field('title').should('have.value', 'Note 1');
cy.fill_field('title', ' Edited');
cy.get('.web-form-actions button').contains('Save').click();
cy.get_field('title').should('have.value', 'Note 1 Edited');
});
it('Allow Multiple Response', () => {
cy.visit('/app/web-form/note');
cy.findByRole("tab", {name: "Form Settings"}).click();
cy.get('input[data-fieldname="allow_multiple"]').check();
cy.save();
cy.visit('/note');
cy.url().should('include', '/note/list');
cy.get('.web-list-actions a:visible').contains('New').click();
cy.url().should('include', '/note/new');
cy.fill_field('title', 'Note 2');
cy.get('.web-form-actions button').contains('Save').click();
});
it('Allow Delete', () => {
cy.visit('/app/web-form/note');
cy.findByRole("tab", {name: "Form Settings"}).click();
cy.get('input[data-fieldname="allow_delete"]').check();
cy.save();
cy.visit('/note');
cy.url().should('include', '/note/list');
cy.get('.web-list-table tbody tr[id="Note 1"] .list-col-checkbox').click();
cy.get('.web-list-table tbody tr[id="Note 2"] .list-col-checkbox').click();
cy.get('.web-list-actions button:visible').contains('Delete').click({force: true});
cy.get('.web-list-actions button').contains('Delete').should('not.be.visible');
cy.visit('/note');
cy.get('.web-list-table tbody tr[id="Note 1"]').should('not.exist');
cy.get('.web-list-table tbody tr[id="Note 2"]').should('not.exist');
cy.get('.web-list-table tbody tr[id="Guest Note 1"]').should('exist');
});
it('Navigate and Submit a WebForm', () => {
cy.visit('/update-profile');
cy.get_field('last_name', 'Data').type('_Test User', {force: true}).wait(200);
cy.get('.web-form-actions a').contains('Edit').click();
cy.fill_field('last_name', '_Test User');
cy.get('.web-form-actions .btn-primary').click();
cy.wait(5000);
cy.url().should('include', '/me');
});
it('Navigate and Submit a MultiStep WebForm', () => {
cy.call('frappe.tests.ui_test_helpers.update_webform_to_multistep').then(() => {
cy.visit('/update-profile-duplicate');
cy.get_field('last_name', 'Data').type('_Test User', {force: true}).wait(200);
cy.get('.web-form-actions a').contains('Edit').click();
cy.fill_field('last_name', '_Test User');
cy.get('.btn-next').should('be.visible');
cy.get('.btn-next').click();
cy.get('.btn-previous').should('be.visible');
cy.get('.btn-next').should('not.be.visible');
cy.get('.web-form-actions .btn-primary').click();
cy.wait(5000);
cy.url().should('include', '/me');
});
});

View file

@ -162,7 +162,12 @@ Cypress.Commands.add('fill_field', (fieldname, value, fieldtype = 'Data') => {
if (fieldtype === 'Select') {
cy.get('@input').select(value);
} else {
cy.get('@input').type(value, {waitForAnimations: false, force: true, delay: 100});
cy.get('@input').type(value, {
waitForAnimations: false,
parseSpecialCharSequences: false,
force: true,
delay: 100
});
}
return cy.get('@input');
});
@ -358,6 +363,10 @@ Cypress.Commands.add('open_list_filter', () => {
cy.get('.filter-popover').should('exist');
});
Cypress.Commands.add('click_custom_action_button', (name) => {
cy.get(`.custom-actions [data-label="${encodeURIComponent(name)}"]`).click();
});
Cypress.Commands.add('click_action_button', (name) => {
cy.findByRole('button', {name: 'Actions'}).click();
cy.get(`.actions-btn-group [data-label="${encodeURIComponent(name)}"]`).click();

View file

@ -22,7 +22,12 @@ from typing import TYPE_CHECKING, Any, Callable, Literal, Optional, overload
import click
from werkzeug.local import Local, release_local
from frappe.query_builder import get_query_builder, patch_query_aggregation, patch_query_execute
from frappe.query_builder import (
get_qb_engine,
get_query_builder,
patch_query_aggregation,
patch_query_execute,
)
from frappe.utils.caching import request_cache
from frappe.utils.data import cstr, sbool
@ -238,7 +243,7 @@ def init(site: str, sites_path: str = ".", new_site: bool = False) -> None:
local.session = _dict()
local.dev_server = _dev_server
local.qb = get_query_builder(local.conf.db_type or "mariadb")
local.qb.engine = get_qb_engine()
setup_module_map()
if not _qb_patched.get(local.conf.db_type):
@ -278,7 +283,9 @@ def connect_replica():
user = local.conf.replica_db_name
password = local.conf.replica_db_password
local.replica_db = get_db(host=local.conf.replica_host, user=user, password=password, port=port)
local.replica_db = get_db(
host=local.conf.replica_host, user=user, password=password, port=port, read_only=True
)
# swap db connections
local.primary_db = local.db
@ -1175,11 +1182,11 @@ def get_doc(*args, **kwargs) -> "Document":
return doc
def get_last_doc(doctype, filters=None, order_by="creation desc"):
def get_last_doc(doctype, filters=None, order_by="creation desc", *, for_update=False):
"""Get last created document of this type."""
d = get_all(doctype, filters=filters, limit_page_length=1, order_by=order_by, pluck="name")
if d:
return get_doc(doctype, d[0])
return get_doc(doctype, d[0], for_update=for_update)
else:
raise DoesNotExistError
@ -1791,6 +1798,14 @@ def respond_as_web_page(
local.response["context"] = context
def redirect(url):
"""Raise a 301 redirect to url"""
from frappe.exceptions import Redirect
flags.redirect_location = url
raise Redirect
def redirect_to_message(title, html, http_status_code=None, context=None, indicator_color=None):
"""Redirects to /message?id=random
Similar to respond_as_web_page, but used to 'redirect' and show message pages like success, failure, etc. with a detailed message

View file

@ -25,7 +25,7 @@ from frappe.utils import get_site_name, sanitize_html
from frappe.utils.error import make_error_snapshot
from frappe.website.serve import get_response
local_manager = LocalManager([frappe.local])
local_manager = LocalManager(frappe.local)
_site = None
_sites_path = os.environ.get("SITES_PATH", ".")
@ -44,6 +44,7 @@ class RequestContext:
frappe.destroy()
@local_manager.middleware
@Request.application
def application(request):
response = None
@ -313,9 +314,6 @@ def after_request(rollback):
return rollback
application = local_manager.make_middleware(application)
def serve(
port=8000, profile=False, no_reload=False, no_threading=False, site=None, sites_path="."
):

View file

@ -14,7 +14,7 @@ from frappe.email.inbox import get_email_accounts
from frappe.model.base_document import get_controller
from frappe.query_builder import DocType
from frappe.query_builder.functions import Count
from frappe.query_builder.terms import SubQuery
from frappe.query_builder.terms import ParameterizedValueWrapper, SubQuery
from frappe.social.doctype.energy_point_log.energy_point_log import get_energy_points
from frappe.social.doctype.energy_point_settings.energy_point_settings import (
is_energy_point_enabled,
@ -328,11 +328,11 @@ def get_unseen_notes():
frappe.qb.from_(note)
.select(note.name, note.title, note.content, note.notify_on_every_login)
.where(
(note.notify_on_every_login == 1)
(note.notify_on_login == 1)
& (note.expire_notification_on > frappe.utils.now())
& (
SubQuery(frappe.qb.from_(nsb).select(nsb.user).where(nsb.parent == note.name)).notin(
[frappe.session.user]
ParameterizedValueWrapper(frappe.session.user).notin(
SubQuery(frappe.qb.from_(nsb).select(nsb.user).where(nsb.parent == note.name))
)
)
)

View file

@ -274,13 +274,6 @@ def delete(doctype, name):
frappe.delete_doc(doctype, name, ignore_missing=False)
@frappe.whitelist(methods=["POST", "PUT"])
def set_default(key, value, parent=None):
"""set a user default value"""
frappe.db.set_default(key, value, parent or frappe.session.user)
frappe.clear_cache(user=frappe.session.user)
@frappe.whitelist(methods=["POST", "PUT"])
def bulk_update(docs):
"""Bulk update documents

View file

@ -8,21 +8,6 @@ from frappe.exceptions import SiteNotSpecifiedError
from frappe.utils import cint
def _is_scheduler_enabled():
enable_scheduler = False
try:
frappe.connect()
enable_scheduler = (
cint(frappe.db.get_single_value("System Settings", "enable_scheduler")) and True or False
)
except Exception:
pass
finally:
frappe.db.close()
return enable_scheduler
@click.command("trigger-scheduler-event", help="Trigger a scheduler event")
@click.argument("event")
@pass_context

View file

@ -86,7 +86,6 @@ def new_site(
db_type=db_type,
db_host=db_host,
db_port=db_port,
new_site=True,
)
if set_default:
@ -844,9 +843,10 @@ def _drop_site(
archived_sites_path = archived_sites_path or os.path.join(
frappe.get_app_path("frappe"), "..", "..", "..", "archived", "sites"
)
archived_sites_path = os.path.realpath(archived_sites_path)
click.secho(f"Moving site to archive under {archived_sites_path}", fg="green")
os.makedirs(archived_sites_path, exist_ok=True)
move(archived_sites_path, site)

View file

@ -523,22 +523,24 @@ def postgres(context):
def _mariadb():
from frappe.database.mariadb.database import MariaDBDatabase
mysql = find_executable("mysql")
os.execv(
command = [
mysql,
[
mysql,
"-u",
frappe.conf.db_name,
"-p" + frappe.conf.db_password,
frappe.conf.db_name,
"-h",
frappe.conf.db_host or "localhost",
"--pager=less -SFX",
"--safe-updates",
"-A",
],
)
"--port",
frappe.conf.db_port or MariaDBDatabase.default_port,
"-u",
frappe.conf.db_name,
f"-p{frappe.conf.db_password}",
frappe.conf.db_name,
"-h",
frappe.conf.db_host or "localhost",
"--pager=less -SFX",
"--safe-updates",
"-A",
]
os.execv(mysql, command)
def _psql():
@ -872,13 +874,20 @@ def run_ui_tests(
and os.path.exists(real_events_plugin_path)
and os.path.exists(testing_library_path)
and os.path.exists(coverage_plugin_path)
and cint(subprocess.getoutput("npm view cypress version")[:1]) >= 6
):
# install cypress
# install cypress & dependent plugins
click.secho("Installing Cypress...", fg="yellow")
frappe.commands.popen(
"yarn add cypress@^6 cypress-file-upload@^5 @4tw/cypress-drag-drop@^2 cypress-real-events @testing-library/cypress@^8 @cypress/code-coverage@^3 --no-lockfile"
packages = " ".join(
[
"cypress@^6",
"cypress-file-upload@^5",
"@4tw/cypress-drag-drop@^2",
"cypress-real-events",
"@testing-library/cypress@^8",
"@cypress/code-coverage@^3",
]
)
frappe.commands.popen(f"yarn add {packages} --no-lockfile")
# run for headless mode
run_or_open = "run --browser chrome --record" if headless else "open"

View file

@ -1,4 +1,5 @@
{
"actions": [],
"creation": "2019-02-07 10:10:46.845678",
"doctype": "DocType",
"editable_grid": 1,
@ -17,7 +18,8 @@
"link_name",
"reference_owner",
"section_break_10",
"content"
"content",
"ip_address"
],
"fields": [
{
@ -102,9 +104,16 @@
"ignore_xss_filter": 1,
"in_list_view": 1,
"label": "Content"
},
{
"fieldname": "ip_address",
"fieldtype": "Data",
"hidden": 1,
"label": "IP Address"
}
],
"modified": "2019-09-02 21:00:10.784787",
"links": [],
"modified": "2022-07-12 17:35:31.774137",
"modified_by": "Administrator",
"module": "Core",
"name": "Comment",
@ -138,6 +147,7 @@
"quick_entry": 1,
"sort_field": "modified",
"sort_order": "DESC",
"states": [],
"title_field": "comment_type",
"track_changes": 1
}

View file

@ -73,7 +73,8 @@ class CommunicationEmailMixin:
if include_sender:
cc.append(self.sender_mailid)
if is_inbound_mail_communcation:
cc.append(self.get_owner())
if (doc_owner := self.get_owner()) not in frappe.STANDARD_USERS:
cc.append(doc_owner)
cc = set(cc) - {self.sender_mailid}
cc.update(self.get_assignees())
@ -92,7 +93,7 @@ class CommunicationEmailMixin:
cc_list = self.mail_cc(
is_inbound_mail_communcation=is_inbound_mail_communcation, include_sender=include_sender
)
return [self.get_email_with_displayname(email) for email in cc_list]
return [self.get_email_with_displayname(email) for email in cc_list if email]
def mail_bcc(self, is_inbound_mail_communcation=False):
"""
@ -120,7 +121,7 @@ class CommunicationEmailMixin:
def get_mail_bcc_with_displayname(self, is_inbound_mail_communcation=False):
bcc_list = self.mail_bcc(is_inbound_mail_communcation=is_inbound_mail_communcation)
return [self.get_email_with_displayname(email) for email in bcc_list]
return [self.get_email_with_displayname(email) for email in bcc_list if email]
def mail_sender(self):
email_account = self.get_outgoing_email_account()

View file

@ -391,7 +391,6 @@ def create_email_account() -> "EmailAccount":
"send_notification_to": "test_comm@example.com",
"pop3_server": "pop.test.example.com",
"imap_folder": [{"folder_name": "INBOX", "append_to": "ToDo"}],
"no_remaining": "0",
"enable_automatic_linking": 1,
}
).insert(ignore_permissions=True)

View file

@ -46,9 +46,7 @@ frappe.ui.form.on('DocType', {
}
if(frm.is_new()) {
if (!(frm.doc.permissions && frm.doc.permissions.length)) {
frm.add_child('permissions', {role: 'System Manager'});
}
frm.events.set_default_permission(frm);
} else {
frm.toggle_enable("engine", 0);
}
@ -65,6 +63,14 @@ frappe.ui.form.on('DocType', {
if (frm.doc.istable && frm.is_new()) {
frm.set_value('autoname', 'autoincrement');
frm.set_value('allow_rename', 0);
} else if (!frm.doc.istable && !frm.is_new()) {
frm.events.set_default_permission(frm);
}
},
set_default_permission: (frm) => {
if (!(frm.doc.permissions && frm.doc.permissions.length)) {
frm.add_child('permissions', {role: 'System Manager'});
}
},
});

View file

@ -17,7 +17,7 @@ from frappe.cache_manager import clear_controller_cache, clear_user_cache
from frappe.custom.doctype.custom_field.custom_field import create_custom_field
from frappe.custom.doctype.property_setter.property_setter import make_property_setter
from frappe.database.schema import validate_column_length, validate_column_name
from frappe.desk.notifications import delete_notification_count_for
from frappe.desk.notifications import delete_notification_count_for, get_filters_for
from frappe.desk.utils import validate_route_conflict
from frappe.model import (
child_table_fields,
@ -181,10 +181,6 @@ class DocType(Document):
)
)
def after_insert(self):
# clear user cache so that on the next reload this doctype is included in boot
clear_user_cache(frappe.session.user)
def set_defaults_for_single_and_table(self):
if self.issingle:
self.allow_import = 0
@ -412,6 +408,9 @@ class DocType(Document):
delete_notification_count_for(doctype=self.name)
frappe.clear_cache(doctype=self.name)
# clear user cache so that on the next reload this doctype is included in boot
clear_user_cache(frappe.session.user)
if not frappe.flags.in_install and hasattr(self, "before_update"):
self.sync_global_search()
@ -983,11 +982,7 @@ def validate_links_table_fieldnames(meta):
fieldnames = tuple(field.fieldname for field in meta.fields)
for index, link in enumerate(meta.links, 1):
if not frappe.get_meta(link.link_doctype).has_field(link.link_fieldname):
message = _("Document Links Row #{0}: Could not find field {1} in {2} DocType").format(
index, frappe.bold(link.link_fieldname), frappe.bold(link.link_doctype)
)
frappe.throw(message, InvalidFieldNameError, _("Invalid Fieldname"))
_test_connection_query(doctype=link.link_doctype, field=link.link_fieldname, idx=index)
if not link.is_child_table:
continue
@ -1016,6 +1011,25 @@ def validate_links_table_fieldnames(meta):
frappe.throw(message, frappe.ValidationError, _("Invalid Table Fieldname"))
def _test_connection_query(doctype, field, idx):
"""Make sure that connection can be queried.
This function executes query similar to one that would be executed for
finding count on dashboard and hence validates if fieldname/doctype are
correct.
"""
filters = get_filters_for(doctype) or {}
filters[field] = ""
try:
frappe.get_all(doctype, filters=filters, limit=1, distinct=True, ignore_ifnull=True)
except Exception as e:
frappe.clear_last_message()
msg = _("Document Links Row #{0}: Invalid doctype or fieldname.").format(idx)
msg += "<br>" + str(e)
frappe.throw(msg, InvalidFieldNameError)
def validate_fields_for_doctype(doctype):
meta = frappe.get_meta(doctype, cached=False)
validate_links_table_fieldnames(meta)

View file

@ -543,7 +543,7 @@ class TestDocType(unittest.TestCase):
# check invalid doctype
doc.append("links", {"link_doctype": "User2", "link_fieldname": "first_name"})
self.assertRaises(frappe.DoesNotExistError, validate_links_table_fieldnames, doc)
self.assertRaises(InvalidFieldNameError, validate_links_table_fieldnames, doc)
doc.links = [] # reset links table
# check invalid fieldname

View file

@ -1,8 +0,0 @@
// Copyright (c) 2021, Frappe Technologies and contributors
// For license information, please see license.txt
frappe.ui.form.on('Feedback', {
// refresh: function(frm) {
// }
});

View file

@ -1,73 +0,0 @@
{
"actions": [],
"creation": "2021-06-03 19:02:55.328423",
"doctype": "DocType",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"reference_doctype",
"reference_name",
"column_break_3",
"like",
"ip_address"
],
"fields": [
{
"fieldname": "column_break_3",
"fieldtype": "Column Break"
},
{
"fieldname": "reference_doctype",
"fieldtype": "Select",
"in_list_view": 1,
"label": "Reference Document Type",
"options": "\nBlog Post"
},
{
"fieldname": "reference_name",
"fieldtype": "Dynamic Link",
"in_list_view": 1,
"label": "Reference Name",
"options": "reference_doctype",
"reqd": 1
},
{
"fieldname": "ip_address",
"fieldtype": "Data",
"hidden": 1,
"label": "IP Address",
"read_only": 1
},
{
"default": "0",
"fieldname": "like",
"fieldtype": "Check",
"label": "Like"
}
],
"index_web_pages_for_search": 1,
"links": [],
"modified": "2021-11-10 20:53:21.255593",
"modified_by": "Administrator",
"module": "Core",
"name": "Feedback",
"owner": "Administrator",
"permissions": [
{
"create": 1,
"delete": 1,
"email": 1,
"export": 1,
"print": 1,
"read": 1,
"report": 1,
"role": "System Manager",
"share": 1,
"write": 1
}
],
"sort_field": "modified",
"sort_order": "DESC",
"title_field": "reference_name",
"track_changes": 1
}

View file

@ -1,9 +0,0 @@
# Copyright (c) 2021, Frappe Technologies and contributors
# License: MIT. See LICENSE
# import frappe
from frappe.model.document import Document
class Feedback(Document):
pass

View file

@ -1,42 +0,0 @@
# Copyright (c) 2021, Frappe Technologies and Contributors
# License: MIT. See LICENSE
import unittest
import frappe
class TestFeedback(unittest.TestCase):
def tearDown(self):
frappe.form_dict.reference_doctype = None
frappe.form_dict.reference_name = None
frappe.form_dict.like = None
frappe.local.request_ip = None
def test_feedback_creation_updation(self):
from frappe.website.doctype.blog_post.test_blog_post import make_test_blog
test_blog = make_test_blog()
frappe.db.delete("Feedback", {"reference_doctype": "Blog Post"})
from frappe.templates.includes.feedback.feedback import give_feedback
frappe.form_dict.reference_doctype = "Blog Post"
frappe.form_dict.reference_name = test_blog.name
frappe.form_dict.like = True
frappe.local.request_ip = "127.0.0.1"
feedback = give_feedback()
self.assertEqual(feedback.like, True)
frappe.form_dict.like = False
updated_feedback = give_feedback()
self.assertEqual(updated_feedback.like, False)
frappe.db.delete("Feedback", {"reference_doctype": "Blog Post"})
test_blog.delete()

View file

@ -327,7 +327,7 @@ def attach_files_to_document(doc: "File", event) -> None:
folder="Home/Attachments",
)
try:
file.insert()
file.insert(ignore_permissions=True)
except Exception:
doc.log_error("Error Attaching File")

View file

@ -63,7 +63,7 @@ def send_sms(receiver_list, msg, sender_name="", success_msg=True):
"success_msg": success_msg,
}
if frappe.db.get_value("SMS Settings", None, "sms_gateway_url"):
if frappe.db.get_single_value("SMS Settings", "sms_gateway_url"):
send_via_gateway(arg)
else:
msgprint(_("Please Update SMS Settings"))

View file

@ -28,7 +28,7 @@ class SystemSettings(Document):
if self.enable_two_factor_auth:
if self.two_factor_method == "SMS":
if not frappe.db.get_value("SMS Settings", None, "sms_gateway_url"):
if not frappe.db.get_single_value("SMS Settings", "sms_gateway_url"):
frappe.throw(
_("Please setup SMS before setting it as an authentication method, via SMS Settings")
)
@ -44,12 +44,7 @@ class SystemSettings(Document):
frappe.flags.update_last_reset_password_date = True
def on_update(self):
for df in self.meta.get("fields"):
if df.fieldtype not in no_value_fields and self.has_value_changed(df.fieldname):
frappe.db.set_default(df.fieldname, self.get(df.fieldname))
if self.language:
set_default_language(self.language)
self.set_defaults()
frappe.cache().delete_value("system_settings")
frappe.cache().delete_value("time_zone")
@ -57,6 +52,14 @@ class SystemSettings(Document):
if frappe.flags.update_last_reset_password_date:
update_last_reset_password_date()
def set_defaults(self):
for df in self.meta.get("fields"):
if df.fieldtype not in no_value_fields and self.has_value_changed(df.fieldname):
frappe.db.set_default(df.fieldname, self.get(df.fieldname))
if self.language:
set_default_language(self.language)
def update_last_reset_password_date():
frappe.db.sql(

View file

@ -173,14 +173,16 @@ frappe.ui.form.on('User', {
});
}
frm.add_custom_button(__("Reset OTP Secret"), function() {
frappe.call({
method: "frappe.twofactor.reset_otp_secret",
args: {
"user": frm.doc.name
}
});
}, __("Password"));
if (frappe.session.user == doc.name || frappe.user.has_role("System Manager")) {
frm.add_custom_button(__("Reset OTP Secret"), function() {
frappe.call({
method: "frappe.twofactor.reset_otp_secret",
args: {
"user": frm.doc.name
}
});
}, __("Password"));
}
frm.trigger('enabled');
@ -285,6 +287,18 @@ frappe.ui.form.on('User', {
}
});
frappe.ui.form.on('User Email', {
email_account(frm, cdt, cdn) {
let child_row = locals[cdt][cdn];
frappe.model.get_value("Email Account", child_row.email_account, "auth_method", (value) => {
child_row.used_oauth = value.auth_method === "OAuth";
frm.refresh_field("user_emails", cdn, "used_oauth");
});
}
});
function has_access_to_edit_user() {
return has_common(frappe.user_roles, get_roles_for_editing_user());
}

View file

@ -134,11 +134,11 @@ class User(Document):
if self.time_zone:
frappe.defaults.set_default("time_zone", self.time_zone, self.name)
if self.has_value_changed("allow_in_mentions") or self.has_value_changed("user_type"):
frappe.cache().delete_key("users_for_mentions")
if self.has_value_changed("enabled"):
frappe.cache().delete_key("users_for_mentions")
frappe.cache().delete_key("enabled_users")
elif self.has_value_changed("allow_in_mentions") or self.has_value_changed("user_type"):
frappe.cache().delete_key("users_for_mentions")
def has_website_permission(self, ptype, user, verbose=False):
"""Returns true if current user is the session user"""
@ -611,10 +611,10 @@ class User(Document):
"""
login_with_mobile = cint(
frappe.db.get_value("System Settings", "System Settings", "allow_login_using_mobile_number")
frappe.db.get_single_value("System Settings", "allow_login_using_mobile_number")
)
login_with_username = cint(
frappe.db.get_value("System Settings", "System Settings", "allow_login_using_user_name")
frappe.db.get_single_value("System Settings", "allow_login_using_user_name")
)
or_filters = [{"name": user_name}]
@ -763,19 +763,11 @@ def has_email_account(email):
@frappe.whitelist(allow_guest=False)
def get_email_awaiting(user):
waiting = frappe.get_all(
return frappe.get_all(
"User Email",
fields=["email_account", "email_id"],
filters={"awaiting_password": 1, "parent": user},
filters={"awaiting_password": 1, "parent": user, "used_oauth": 0},
)
if waiting:
return waiting
else:
user_email_table = DocType("User Email")
frappe.qb.update(user_email_table).set(user_email_table.user_email_table, 0).where(
user_email_table.parent == user
).run()
return False
def ask_pass_update():
@ -783,7 +775,7 @@ def ask_pass_update():
from frappe.utils import set_default
password_list = frappe.get_all(
"User Email", filters={"awaiting_password": True}, pluck="parent", distinct=True
"User Email", filters={"awaiting_password": 1, "used_oauth": 0}, pluck="parent", distinct=True
)
set_default("email_user_password", ",".join(password_list))
@ -869,7 +861,7 @@ def sign_up(email, full_name, redirect_to):
user.insert()
# set default signup role as per Portal Settings
default_role = frappe.db.get_value("Portal Settings", None, "default_role")
default_role = frappe.db.get_single_value("Portal Settings", "default_role")
if default_role:
user.add_roles(default_role)

View file

@ -9,6 +9,7 @@
"email_id",
"column_break_3",
"awaiting_password",
"used_oauth",
"enable_outgoing"
],
"fields": [
@ -48,16 +49,25 @@
"fieldtype": "Check",
"label": "Enable Outgoing",
"read_only": 1
},
{
"default": "0",
"fieldname": "used_oauth",
"fieldtype": "Check",
"in_list_view": 1,
"label": "Used OAuth",
"read_only": 1
}
],
"istable": 1,
"links": [],
"modified": "2020-04-06 19:19:12.130246",
"modified": "2022-06-03 14:25:46.944733",
"modified_by": "Administrator",
"module": "Core",
"name": "User Email",
"owner": "Administrator",
"permissions": [],
"sort_field": "modified",
"sort_order": "DESC"
"sort_order": "DESC",
"states": []
}

View file

@ -18,8 +18,8 @@
{% for item in data.changed %}
<tr>
<td>{{ frappe.meta.get_label(doc.ref_doctype, item[0]) }}</td>
<td class="danger">{{ item[1] }}</td>
<td class="success">{{ item[2] }}</td>
<td class="diff-remove">{{ item[1] }}</td>
<td class="diff-add">{{ item[2] }}</td>
</tr>
{% endfor %}
</tbody>
@ -43,8 +43,7 @@
{% for item in values %}
<tr>
<td>{{ frappe.meta.get_label(doc.ref_doctype, item[0]) }}</td>
<td class="{{
key==="added" ? __("success") : __("danger") }}">
<td class="{{ key==="added" ? "diff-add" : "diff-remove" }}">
{% var item_keys = Object.keys(item[1]).sort(); %}
<table class="table table-bordered">
<tbody>
@ -86,8 +85,8 @@
<td>{{ frappe.meta.get_label(doc.ref_doctype, table_info[0]) }}</td>
<td>{{ table_info[1] }}</td>
<td>{{ item[0] }}</td>
<td class="danger">{{ item[1] }}</td>
<td class="success">{{ item[2] }}</td>
<td class="diff-remove">{{ item[1] }}</td>
<td class="diff-add">{{ item[2] }}</td>
</tr>
{% endfor %}
{% endfor %}

View file

@ -18,9 +18,10 @@
"introduction_text": "",
"is_multi_step_form": 0,
"is_standard": 1,
"list_columns": [],
"login_required": 1,
"max_attachment_size": 0,
"modified": "2022-03-22 15:00:43.456738",
"modified": "2022-07-18 16:51:19.796411",
"modified_by": "Administrator",
"module": "Core",
"name": "edit-profile",
@ -29,9 +30,8 @@
"route": "update-profile",
"route_to_success_link": 0,
"show_attachments": 0,
"show_in_grid": 0,
"show_list": 0,
"show_sidebar": 0,
"sidebar_items": [],
"success_message": "Profile updated successfully.",
"success_url": "/me",
"title": "Update Profile",

View file

@ -39,17 +39,21 @@ def drop_user_and_database(db_name, root_login=None, root_password=None):
)
def get_db(host=None, user=None, password=None, port=None):
def get_db(host=None, user=None, password=None, port=None, read_only=False):
import frappe
if frappe.conf.db_type == "postgres":
import frappe.database.postgres.database
return frappe.database.postgres.database.PostgresDatabase(host, user, password, port=port)
return frappe.database.postgres.database.PostgresDatabase(
host, user, password, port=port, read_only=read_only
)
else:
import frappe.database.mariadb.database
return frappe.database.mariadb.database.MariaDBDatabase(host, user, password, port=port)
return frappe.database.mariadb.database.MariaDBDatabase(
host, user, password, port=port, read_only=read_only
)
def setup_help_database(help_db_name):

View file

@ -1,22 +1,29 @@
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and Contributors
# License: MIT. See LICENSE
# Database Module
# --------------------
import datetime
import json
import random
import re
import string
import traceback
from contextlib import contextmanager
from time import time
from pypika.terms import Criterion, NullValue, PseudoColumn
from pypika.terms import Criterion, NullValue
import frappe
import frappe.defaults
import frappe.model.meta
from frappe import _
from frappe.database.utils import (
EmptyQueryValues,
FallBackDateTimeStr,
LazyMogrify,
Query,
QueryValues,
is_query_type,
)
from frappe.exceptions import DoesNotExistError
from frappe.model.utils.link_count import flush_local_link_count
from frappe.query_builder.functions import Count
@ -30,10 +37,6 @@ SINGLE_WORD_PATTERN = re.compile(r'([`"]?)(tab([A-Z]\w+))\1')
MULTI_WORD_PATTERN = re.compile(r'([`"])(tab([A-Z]\w+)( [A-Z]\w+)+)\1')
def is_query_type(query: str, query_type: str | tuple[str]) -> bool:
return query.lstrip().split(maxsplit=1)[0].lower().startswith(query_type)
class Database:
"""
Open a database connection with the given parmeters, if use_default is True, use the
@ -54,12 +57,22 @@ class Database:
class InvalidColumnName(frappe.ValidationError):
pass
def __init__(self, host=None, user=None, password=None, ac_name=None, use_default=0, port=None):
def __init__(
self,
host=None,
user=None,
password=None,
ac_name=None,
use_default=0,
port=None,
read_only=False,
):
self.setup_type_map()
self.host = host or frappe.conf.db_host or "127.0.0.1"
self.port = port or frappe.conf.db_port or ""
self.user = user or frappe.conf.db_name
self.db_name = frappe.conf.db_name
self.read_only = read_only # Uses READ ONLY connection if set
self._conn = None
if ac_name:
@ -73,15 +86,8 @@ class Database:
self.password = password or frappe.conf.db_password
self.value_cache = {}
@property
def query(self):
if not hasattr(self, "_query"):
from .query import Query
self._query = Query()
del Query
return self._query
# self.db_type: str
# self.last_query (lazy) attribute of last sql query executed
def setup_type_map(self):
pass
@ -98,15 +104,22 @@ class Database:
self._conn.select_db(db_name)
def get_connection(self):
pass
"""Returns a Database connection object that conforms with https://peps.python.org/pep-0249/#connection-objects"""
raise NotImplementedError
def get_database_size(self):
pass
raise NotImplementedError
def _transform_query(self, query: Query, values: QueryValues) -> tuple:
return query, values
def _transform_result(self, result: list[tuple]) -> list[tuple]:
return result
def sql(
self,
query,
values=(),
query: Query,
values: QueryValues = EmptyQueryValues,
as_dict=0,
as_list=0,
formatted=0,
@ -122,7 +135,7 @@ class Database:
"""Execute a SQL query and fetch all rows.
:param query: SQL query.
:param values: List / dict of values to be escaped and substituted in the query.
:param values: Tuple / List / Dict of values to be escaped and substituted in the query.
:param as_dict: Return as a dictionary.
:param as_list: Always return as a list.
:param formatted: Format values like date etc.
@ -161,46 +174,25 @@ class Database:
# in transaction validations
self.check_transaction_status(query)
self.clear_db_table_cache(query)
# autocommit
if auto_commit:
self.commit()
# execute
if debug:
time_start = time()
if values == EmptyQueryValues:
values = None
elif not isinstance(values, (tuple, dict, list)):
values = (values,)
query, values = self._transform_query(query, values)
try:
if debug:
time_start = time()
self.log_query(query, values, debug, explain)
if values != ():
# MySQL-python==1.2.5 hack!
if not isinstance(values, (dict, tuple, list)):
values = (values,)
self._cursor.execute(query, values)
if frappe.flags.in_migrate:
self.log_touched_tables(query, values)
else:
self._cursor.execute(query)
if frappe.flags.in_migrate:
self.log_touched_tables(query)
if debug:
time_end = time()
frappe.errprint(("Execution time: {} sec").format(round(time_end - time_start, 2)))
self._cursor.execute(query, values)
except Exception as e:
if self.is_syntax_error(e):
# only for mariadb
frappe.errprint("Syntax error in query:")
frappe.errprint(query)
frappe.errprint(f"Syntax error in query:\n{query} {values}")
elif self.is_deadlocked(e):
raise frappe.QueryDeadlockError(e)
@ -208,29 +200,39 @@ class Database:
elif self.is_timedout(e):
raise frappe.QueryTimeoutError(e)
elif frappe.conf.db_type == "postgres":
# TODO: added temporarily
import traceback
# TODO: added temporarily
elif self.db_type == "postgres":
traceback.print_stack()
print(e)
frappe.errprint(f"Error in query:\n{e}")
raise
if ignore_ddl and (
self.is_missing_column(e) or self.is_table_missing(e) or self.cant_drop_field_or_key(e)
):
pass
else:
elif isinstance(e, self.ProgrammingError):
traceback.print_stack()
frappe.errprint(f"Error in query:\n{query, values}")
raise
if not (
ignore_ddl
and (self.is_missing_column(e) or self.is_table_missing(e) or self.cant_drop_field_or_key(e))
):
raise
if debug:
time_end = time()
frappe.errprint(f"Execution time: {time_end - time_start:.2f} sec")
self.log_query(query, values, debug, explain)
if auto_commit:
self.commit()
if not self._cursor.description:
return ()
self.last_result = self._transform_result(self._cursor.fetchall())
if pluck:
return [r[0] for r in self._cursor.fetchall()]
return [r[0] for r in self.last_result]
# scrub output if required
if as_dict:
@ -240,53 +242,72 @@ class Database:
r.update(update)
return ret
elif as_list:
return self.convert_to_lists(self._cursor.fetchall(), formatted, as_utf8)
return self.convert_to_lists(self.last_result, formatted, as_utf8)
elif as_utf8:
return self.convert_to_lists(self._cursor.fetchall(), formatted, as_utf8)
return self.convert_to_lists(self.last_result, formatted, as_utf8)
else:
return self._cursor.fetchall()
return self.last_result
def log_query(self, query, values, debug, explain):
# for debugging in tests
if frappe.conf.get("allow_tests") and frappe.cache().get_value("flag_print_sql"):
print(self.mogrify(query, values))
def _log_query(self, mogrified_query: str, debug: bool = False, explain: bool = False) -> None:
"""Takes the query and logs it to various interfaces according to the settings."""
_query = None
if frappe.conf.allow_tests and frappe.cache().get_value("flag_print_sql"):
_query = _query or str(mogrified_query)
print(_query)
# debug
if debug:
if explain and is_query_type(query, "select"):
self.explain_query(query, values)
frappe.errprint(self.mogrify(query, values))
_query = _query or str(mogrified_query)
if explain and is_query_type(_query, "select"):
self.explain_query(_query)
frappe.errprint(_query)
# info
if (frappe.conf.get("logging") or False) == 2:
frappe.log("<<<< query")
frappe.log(self.mogrify(query, values))
frappe.log(">>>>")
if frappe.conf.logging == 2:
_query = _query or str(mogrified_query)
frappe.log(f"<<<< query\n{_query}\n>>>>")
def mogrify(self, query, values):
if frappe.flags.in_migrate:
_query = _query or str(mogrified_query)
self.log_touched_tables(_query)
def log_query(
self, query: str, values: QueryValues = None, debug: bool = False, explain: bool = False
) -> str:
# TODO: Use mogrify until MariaDB Connector/C 1.1 is released and we can fetch something
# like cursor._transformed_statement from the cursor object. We can also avoid setting
# mogrified_query if we don't need to log it.
mogrified_query = self.lazy_mogrify(query, values)
self._log_query(mogrified_query, debug, explain)
return mogrified_query
def mogrify(self, query: Query, values: QueryValues):
"""build the query string with values"""
if not values:
return query
else:
try:
return self._cursor.mogrify(query, values)
except Exception:
return (query, values)
try:
return self._cursor.mogrify(query, values)
except AttributeError:
if isinstance(values, dict):
return query % {k: frappe.db.escape(v) if isinstance(v, str) else v for k, v in values.items()}
elif isinstance(values, (list, tuple)):
return query % tuple(frappe.db.escape(v) if isinstance(v, str) else v for v in values)
return query, values
def lazy_mogrify(self, query: Query, values: QueryValues) -> LazyMogrify:
"""Wrap the object with str to generate mogrified query."""
return LazyMogrify(query, values)
def explain_query(self, query, values=None):
"""Print `EXPLAIN` in error log."""
frappe.errprint("--- query explain ---")
try:
frappe.errprint("--- query explain ---")
if values is None:
self._cursor.execute("explain " + query)
else:
self._cursor.execute("explain " + query, values)
import json
self._cursor.execute(f"EXPLAIN {query}", values)
except Exception as e:
frappe.errprint(f"error in query explain: {e}")
else:
frappe.errprint(json.dumps(self.fetch_as_dict(), indent=1))
frappe.errprint("--- query explain end ---")
except Exception:
frappe.errprint("error in query explain")
def sql_list(self, query, values=(), debug=False, **kwargs):
"""Return data as list of single elements (first column).
@ -333,7 +354,7 @@ class Database:
def fetch_as_dict(self, formatted=0, as_utf8=0):
"""Internal. Converts results to dict."""
result = self._cursor.fetchall()
result = self.last_result
ret = []
if result:
keys = [column[0] for column in self._cursor.description]
@ -599,7 +620,7 @@ class Database:
return [map(values.get, fields)]
else:
r = self.query.get_sql(
r = frappe.qb.engine.get_query(
"Singles",
filters={"field": ("in", tuple(fields)), "doctype": doctype},
fields=["field", "value"],
@ -632,7 +653,7 @@ class Database:
# Get coulmn and value of the single doctype Accounts Settings
account_settings = frappe.db.get_singles_dict("Accounts Settings")
"""
queried_result = self.query.get_sql(
queried_result = frappe.qb.engine.get_query(
"Singles",
filters={"doctype": doctype},
fields=["field", "value"],
@ -705,7 +726,7 @@ class Database:
if cache and fieldname in self.value_cache[doctype]:
return self.value_cache[doctype][fieldname]
val = self.query.get_sql(
val = frappe.qb.engine.get_query(
table="Singles",
filters={"doctype": doctype, "field": fieldname},
fields="value",
@ -747,14 +768,7 @@ class Database:
):
field_objects = []
if not isinstance(fields, Criterion):
for field in fields:
if "(" in str(field) or " as " in str(field):
field_objects.append(PseudoColumn(field))
else:
field_objects.append(field)
query = self.query.get_sql(
query = frappe.qb.engine.get_query(
table=doctype,
filters=filters,
orderby=order_by,
@ -864,7 +878,7 @@ class Database:
frappe.clear_document_cache(dt, docname)
else:
query = self.query.build_conditions(table=dt, filters=dn, update=True)
query = frappe.qb.engine.build_conditions(table=dt, filters=dn, update=True)
# TODO: Fix this; doesn't work rn - gavin@frappe.io
# frappe.cache().hdel_keys(dt, "document_cache")
# Workaround: clear all document caches
@ -886,13 +900,8 @@ class Database:
def touch(self, doctype, docname):
"""Update the modified timestamp of this document."""
modified = now()
self.sql(
"""update `tab{doctype}` set `modified`=%s
where name=%s""".format(
doctype=doctype
),
(modified, docname),
)
DocType = frappe.qb.DocType(doctype)
frappe.qb.update(DocType).set(DocType.modified, modified).where(DocType.name == docname).run()
return modified
@staticmethod
@ -951,7 +960,7 @@ class Database:
frappe.call(method[0], *(method[1] or []), **(method[2] or {}))
self.sql("commit")
if frappe.conf.db_type == "postgres":
if self.db_type == "postgres":
# Postgres requires explicitly starting new transaction
self.begin()
@ -1007,22 +1016,11 @@ class Database:
return self.table_exists(doctype)
def get_tables(self, cached=True):
tables = frappe.cache().get_value("db_tables")
if not tables or not cached:
table_rows = self.sql(
"""
SELECT table_name
FROM information_schema.tables
WHERE table_schema NOT IN ('pg_catalog', 'information_schema')
"""
)
tables = {d[0] for d in table_rows}
frappe.cache().set_value("db_tables", tables)
return tables
raise NotImplementedError
def a_row_exists(self, doctype):
"""Returns True if atleast one row exists."""
return self.sql(f"select name from `tab{doctype}` limit 1")
return frappe.get_all(doctype, limit=1, order_by=None, as_list=True)
def exists(self, dt, dn=None, cache=False):
"""Return the document name of a matching document, or None.
@ -1065,7 +1063,9 @@ class Database:
cache_count = frappe.cache().get_value(f"doctype:count:{dt}")
if cache_count is not None:
return cache_count
query = self.query.get_sql(table=dt, filters=filters, fields=Count("*"), distinct=distinct)
query = frappe.qb.engine.get_query(
table=dt, filters=filters, fields=Count("*"), distinct=distinct
)
count = self.sql(query, debug=debug)[0][0]
if not filters and cache:
frappe.cache().set_value(f"doctype:count:{dt}", count, expires_in_sec=86400)
@ -1078,7 +1078,7 @@ class Database:
@staticmethod
def format_datetime(datetime):
if not datetime:
return "0001-01-01 00:00:00.000000"
return FallBackDateTimeStr
if isinstance(datetime, str):
if ":" not in datetime:
@ -1094,28 +1094,27 @@ class Database:
from frappe.utils import now_datetime
return self.sql(
"""select count(name) from `tab{doctype}`
where creation >= %s""".format(
doctype=doctype
),
now_datetime() - relativedelta(minutes=minutes),
)[0][0]
Table = frappe.qb.DocType(doctype)
return (
frappe.qb.from_(Table)
.select(Count(Table.name))
.where(Table.creation >= now_datetime() - relativedelta(minutes=minutes))
.run()[0][0]
)
def get_db_table_columns(self, table) -> list[str]:
"""Returns list of column names from given table."""
columns = frappe.cache().hget("table_columns", table)
if columns is None:
columns = [
r[0]
for r in self.sql(
"""
select column_name
from information_schema.columns
where table_name = %s """,
table,
)
]
information_schema = frappe.qb.Schema("information_schema")
columns = (
frappe.qb.from_(information_schema.columns)
.select(information_schema.columns.column_name)
.where(information_schema.columns.table_name == table)
.run(pluck=True)
)
if columns:
frappe.cache().hset("table_columns", table, columns)
@ -1134,12 +1133,19 @@ class Database:
return column in self.get_table_columns(doctype)
def get_column_type(self, doctype, column):
return self.sql(
"""SELECT column_type FROM INFORMATION_SCHEMA.COLUMNS
WHERE table_name = 'tab{}' AND column_name = '{}' """.format(
doctype, column
"""Returns column type from database."""
information_schema = frappe.qb.Schema("information_schema")
table = get_table_name(doctype)
return (
frappe.qb.from_(information_schema.columns)
.select(information_schema.columns.column_type)
.where(
(information_schema.columns.table_name == table)
& (information_schema.columns.column_name == column)
)
)[0][0]
.run(pluck=True)[0]
)
def has_index(self, table_name, index_name):
raise NotImplementedError
@ -1162,7 +1168,6 @@ class Database:
def close(self):
"""Close database connection."""
if self._conn:
# self._cursor.close()
self._conn.close()
self._cursor = None
self._conn = None
@ -1191,7 +1196,7 @@ class Database:
return self.is_missing_column(e) or self.is_table_missing(e)
def multisql(self, sql_dict, values=(), **kwargs):
current_dialect = frappe.db.db_type or "mariadb"
current_dialect = self.db_type or "mariadb"
query = sql_dict.get(current_dialect)
return self.sql(query, values, **kwargs)
@ -1202,7 +1207,7 @@ class Database:
Doctype name can be passed directly, it will be pre-pended with `tab`.
"""
filters = filters or kwargs.get("conditions")
query = self.query.build_conditions(table=doctype, filters=filters).delete()
query = frappe.qb.engine.build_conditions(table=doctype, filters=filters).delete()
if "debug" not in kwargs:
kwargs["debug"] = debug
return query.run(**kwargs)
@ -1225,9 +1230,7 @@ class Database:
else:
return None
def log_touched_tables(self, query, values=None):
if values:
query = frappe.safe_decode(self._cursor.mogrify(query, values))
def log_touched_tables(self, query):
if is_query_type(query, ("insert", "delete", "update", "alter", "drop", "rename")):
# single_word_regex is designed to match following patterns
# `tabXxx`, tabXxx and "tabXxx"
@ -1266,9 +1269,9 @@ class Database:
query = frappe.qb.into(table)
if ignore_duplicates:
# Pypika does not have same api for ignoring duplicates
if frappe.conf.db_type == "mariadb":
if self.db_type == "mariadb":
query = query.ignore()
elif frappe.conf.db_type == "postgres":
elif self.db_type == "postgres":
query = query.on_conflict().do_nothing()
values_to_insert = values[start_index : start_index + chunk_size]

View file

@ -1,5 +1,3 @@
import os
import frappe
@ -15,63 +13,51 @@ class DbManager:
return self.db.sql("select user()")[0][0].split("@")[1]
def create_user(self, user, password, host=None):
# Create user if it doesn't exist.
if not host:
host = self.get_current_host()
if password:
self.db.sql(f"CREATE USER '{user}'@'{host}' IDENTIFIED BY '{password}';")
else:
self.db.sql(f"CREATE USER '{user}'@'{host}';")
host = host or self.get_current_host()
password_predicate = f" IDENTIFIED BY '{password}'" if password else ""
self.db.sql(f"CREATE USER '{user}'@'{host}'{password_predicate}")
def delete_user(self, target, host=None):
if not host:
host = self.get_current_host()
try:
self.db.sql(f"DROP USER '{target}'@'{host}';")
except Exception as e:
if e.args[0] == 1396:
pass
else:
raise
host = host or self.get_current_host()
self.db.sql(f"DROP USER IF EXISTS '{target}'@'{host}'")
def create_database(self, target):
if target in self.get_database_list():
self.drop_database(target)
self.db.sql("CREATE DATABASE `%s` ;" % target)
self.db.sql(f"CREATE DATABASE `{target}`")
def drop_database(self, target):
self.db.sql("DROP DATABASE IF EXISTS `%s`;" % target)
self.db.sql_ddl(f"DROP DATABASE IF EXISTS `{target}`")
def grant_all_privileges(self, target, user, host=None):
if not host:
host = self.get_current_host()
if frappe.conf.get("rds_db", 0) == 1:
self.db.sql(
"GRANT SELECT, INSERT, UPDATE, DELETE, CREATE, DROP, INDEX, ALTER, CREATE TEMPORARY TABLES, CREATE VIEW, EVENT, TRIGGER, SHOW VIEW, CREATE ROUTINE, ALTER ROUTINE, EXECUTE, LOCK TABLES ON `%s`.* TO '%s'@'%s';"
% (target, user, host)
host = host or self.get_current_host()
permissions = (
(
"SELECT, INSERT, UPDATE, DELETE, CREATE, DROP, INDEX, ALTER, "
"CREATE TEMPORARY TABLES, CREATE VIEW, EVENT, TRIGGER, SHOW VIEW, "
"CREATE ROUTINE, ALTER ROUTINE, EXECUTE, LOCK TABLES"
)
else:
self.db.sql(f"GRANT ALL PRIVILEGES ON `{target}`.* TO '{user}'@'{host}';")
if frappe.conf.rds_db
else "ALL PRIVILEGES"
)
self.db.sql(f"GRANT {permissions} ON `{target}`.* TO '{user}'@'{host}'")
def flush_privileges(self):
	"""Reload the server's grant tables so privilege changes take effect immediately."""
	self.db.sql("FLUSH PRIVILEGES")
def get_database_list(self):
"""get list of databases"""
return [d[0] for d in self.db.sql("SHOW DATABASES")]
return self.db.sql("SHOW DATABASES", pluck=True)
@staticmethod
def restore_database(target, source, user, password):
import os
from distutils.spawn import find_executable
from frappe.utils import make_esc
esc = make_esc("$ ")
from distutils.spawn import find_executable
pv = find_executable("pv")
if pv:
pipe = f"{pv} {source} |"
source = ""

View file

@ -1,3 +1,5 @@
import re
import pymysql
from pymysql.constants import ER, FIELD_TYPE
from pymysql.converters import conversions, escape_string
@ -7,14 +9,125 @@ from frappe.database.database import Database
from frappe.database.mariadb.schema import MariaDBTable
from frappe.utils import UnicodeWithAttrs, cstr, get_datetime, get_table_name
_PARAM_COMP = re.compile(r"%\([\w]*\)s")
class MariaDBDatabase(Database):
ProgrammingError = pymysql.err.ProgrammingError
TableMissingError = pymysql.err.ProgrammingError
OperationalError = pymysql.err.OperationalError
InternalError = pymysql.err.InternalError
SQLError = pymysql.err.ProgrammingError
DataError = pymysql.err.DataError
class MariaDBExceptionUtil:
	"""Mixin mapping PyMySQL exception classes and MariaDB error codes to
	the generic checks used by database-agnostic Frappe code."""

	# Generic exception aliases consulted by callers that don't know the backend.
	ProgrammingError = pymysql.ProgrammingError
	TableMissingError = pymysql.ProgrammingError
	OperationalError = pymysql.OperationalError
	InternalError = pymysql.InternalError
	SQLError = pymysql.ProgrammingError
	DataError = pymysql.DataError

	# match ER_SEQUENCE_RUN_OUT - https://mariadb.com/kb/en/mariadb-error-codes/
	# NOTE(review): this assigns .errno on pymysql.OperationalError itself, i.e.
	# it mutates the shared pymysql class object — confirm this side effect is intended.
	SequenceGeneratorLimitExceeded = pymysql.OperationalError
	SequenceGeneratorLimitExceeded.errno = 4084

	@staticmethod
	def is_deadlocked(e: pymysql.Error) -> bool:
		# e.args[0] carries the numeric MariaDB/MySQL error code.
		return e.args[0] == ER.LOCK_DEADLOCK

	@staticmethod
	def is_timedout(e: pymysql.Error) -> bool:
		return e.args[0] == ER.LOCK_WAIT_TIMEOUT

	@staticmethod
	def is_table_missing(e: pymysql.Error) -> bool:
		return e.args[0] == ER.NO_SUCH_TABLE

	@staticmethod
	def is_missing_table(e: pymysql.Error) -> bool:
		# Alias kept for naming symmetry with other backends.
		return MariaDBDatabase.is_table_missing(e)

	@staticmethod
	def is_missing_column(e: pymysql.Error) -> bool:
		return e.args[0] == ER.BAD_FIELD_ERROR

	@staticmethod
	def is_duplicate_fieldname(e: pymysql.Error) -> bool:
		return e.args[0] == ER.DUP_FIELDNAME

	@staticmethod
	def is_duplicate_entry(e: pymysql.Error) -> bool:
		return e.args[0] == ER.DUP_ENTRY

	@staticmethod
	def is_access_denied(e: pymysql.Error) -> bool:
		return e.args[0] == ER.ACCESS_DENIED_ERROR

	@staticmethod
	def cant_drop_field_or_key(e: pymysql.Error) -> bool:
		return e.args[0] == ER.CANT_DROP_FIELD_OR_KEY

	@staticmethod
	def is_syntax_error(e: pymysql.Error) -> bool:
		return e.args[0] == ER.PARSE_ERROR

	@staticmethod
	def is_data_too_long(e: pymysql.Error) -> bool:
		return e.args[0] == ER.DATA_TOO_LONG

	@staticmethod
	def is_primary_key_violation(e: pymysql.Error) -> bool:
		# Duplicate entry specifically on the PRIMARY key, raised as an IntegrityError.
		return (
			MariaDBDatabase.is_duplicate_entry(e)
			and "PRIMARY" in cstr(e.args[1])
			and isinstance(e, pymysql.IntegrityError)
		)

	@staticmethod
	def is_unique_key_violation(e: pymysql.Error) -> bool:
		# Duplicate entry on any (non-primary) unique key.
		return (
			MariaDBDatabase.is_duplicate_entry(e)
			and "Duplicate" in cstr(e.args[1])
			and isinstance(e, pymysql.IntegrityError)
		)
class MariaDBConnectionUtil:
	"""Mixin providing PyMySQL connection construction for MariaDB."""

	def get_connection(self):
		"""Return a new connection with auto-reconnect enabled."""
		connection = self._get_connection()
		connection.auto_reconnect = True
		return connection

	def _get_connection(self):
		"""Return MariaDB connection object."""
		return self.create_connection()

	def create_connection(self):
		"""Open a PyMySQL connection using the computed settings."""
		settings = self.get_connection_settings()
		return pymysql.connect(**settings)

	def get_connection_settings(self) -> dict:
		"""Build the keyword arguments passed to ``pymysql.connect``."""
		settings = {
			"host": self.host,
			"user": self.user,
			"password": self.password,
			"conv": self.CONVERSION_MAP,
			"charset": "utf8mb4",
			"use_unicode": True,
		}

		# Non-root site users connect to the database named after the user.
		if self.user != "root":
			settings["database"] = self.user

		if self.port:
			settings["port"] = int(self.port)

		if frappe.conf.local_infile:
			settings["local_infile"] = frappe.conf.local_infile

		# SSL is enabled only when all three material paths are configured.
		ca = frappe.conf.db_ssl_ca
		cert = frappe.conf.db_ssl_cert
		key = frappe.conf.db_ssl_key
		if ca and cert and key:
			settings.update(ca=ca, cert=cert, key=key)

		return settings
class MariaDBDatabase(MariaDBConnectionUtil, MariaDBExceptionUtil, Database):
REGEX_CHARACTER = "regexp"
# NOTE: using a very small cache - as during backup, if the sequence was used in anyform,
@ -25,6 +138,12 @@ class MariaDBDatabase(Database):
# using the system after a restore.
# issue link: https://jira.mariadb.org/browse/MDEV-21786
SEQUENCE_CACHE = 50
CONVERSION_MAP = conversions | {
FIELD_TYPE.NEWDECIMAL: float,
FIELD_TYPE.DATETIME: get_datetime,
UnicodeWithAttrs: escape_string,
}
default_port = "3306"
def setup_type_map(self):
self.db_type = "mariadb"
@ -65,44 +184,6 @@ class MariaDBDatabase(Database):
"JSON": ("json", ""),
}
def get_connection(self):
usessl = 0
if frappe.conf.db_ssl_ca and frappe.conf.db_ssl_cert and frappe.conf.db_ssl_key:
usessl = 1
ssl_params = {
"ca": frappe.conf.db_ssl_ca,
"cert": frappe.conf.db_ssl_cert,
"key": frappe.conf.db_ssl_key,
}
conversions.update(
{
FIELD_TYPE.NEWDECIMAL: float,
FIELD_TYPE.DATETIME: get_datetime,
UnicodeWithAttrs: conversions[str],
}
)
conn = pymysql.connect(
user=self.user or "",
password=self.password or "",
host=self.host,
port=self.port,
charset="utf8mb4",
use_unicode=True,
ssl=ssl_params if usessl else None,
conv=conversions,
local_infile=frappe.conf.local_infile,
)
# MYSQL_OPTION_MULTI_STATEMENTS_OFF = 1
# # self._conn.set_server_option(MYSQL_OPTION_MULTI_STATEMENTS_OFF)
if self.user != "root":
conn.select_db(self.user)
return conn
def get_database_size(self):
"""'Returns database size in MB"""
db_size = self.sql(
@ -117,9 +198,18 @@ class MariaDBDatabase(Database):
return db_size[0].get("database_size")
def log_query(self, query, values, debug, explain):
self.last_query = self._cursor._last_executed
self._log_query(query, debug, explain)
return self.last_query
@staticmethod
def escape(s, percent=True):
"""Excape quotes and percent in given string."""
# Update: We've scrapped PyMySQL in favour of MariaDB's official Python client
# Also, given we're promoting use of the PyPika builder via frappe.qb, the use
# of this method should be limited.
# pymysql expects unicode argument to escape_string with Python 3
s = frappe.as_unicode(escape_string(frappe.as_unicode(s)), "utf-8").replace("`", "\\`")
@ -140,7 +230,7 @@ class MariaDBDatabase(Database):
@staticmethod
def is_type_datetime(code):
return code in (pymysql.DATE, pymysql.DATETIME)
return code == pymysql.DATETIME
def rename_table(self, old_name: str, new_name: str) -> list | tuple:
old_name = get_table_name(old_name)
@ -158,57 +248,6 @@ class MariaDBDatabase(Database):
null_constraint = "NOT NULL" if not nullable else ""
return self.sql_ddl(f"ALTER TABLE `{table_name}` MODIFY `{column}` {type} {null_constraint}")
# exception types
@staticmethod
def is_deadlocked(e):
return e.args[0] == ER.LOCK_DEADLOCK
@staticmethod
def is_timedout(e):
return e.args[0] == ER.LOCK_WAIT_TIMEOUT
@staticmethod
def is_table_missing(e):
return e.args[0] == ER.NO_SUCH_TABLE
@staticmethod
def is_missing_table(e):
return MariaDBDatabase.is_table_missing(e)
@staticmethod
def is_missing_column(e):
return e.args[0] == ER.BAD_FIELD_ERROR
@staticmethod
def is_duplicate_fieldname(e):
return e.args[0] == ER.DUP_FIELDNAME
@staticmethod
def is_duplicate_entry(e):
return e.args[0] == ER.DUP_ENTRY
@staticmethod
def is_access_denied(e):
return e.args[0] == ER.ACCESS_DENIED_ERROR
@staticmethod
def cant_drop_field_or_key(e):
return e.args[0] == ER.CANT_DROP_FIELD_OR_KEY
@staticmethod
def is_syntax_error(e):
return e.args[0] == ER.PARSE_ERROR
@staticmethod
def is_data_too_long(e):
return e.args[0] == ER.DATA_TOO_LONG
def is_primary_key_violation(self, e):
return self.is_duplicate_entry(e) and "PRIMARY" in cstr(e.args[1])
def is_unique_key_violation(self, e):
return self.is_duplicate_entry(e) and "Duplicate" in cstr(e.args[1])
def create_auth_table(self):
self.sql_ddl(
"""create table if not exists `__Auth` (
@ -250,22 +289,6 @@ class MariaDBDatabase(Database):
) ENGINE=InnoDB DEFAULT CHARSET=utf8"""
)
def create_help_table(self):
self.sql(
"""create table help(
path varchar(255),
content text,
title text,
intro text,
full_path text,
fulltext(title),
fulltext(content),
index (path))
COLLATE=utf8mb4_unicode_ci
ENGINE=MyISAM
CHARACTER SET=utf8mb4"""
)
@staticmethod
def get_on_duplicate_update(key=None):
return "ON DUPLICATE key UPDATE "
@ -351,5 +374,26 @@ class MariaDBDatabase(Database):
db_table.sync()
self.begin()
def get_database_list(self, target):
return [d[0] for d in self.sql("SHOW DATABASES;")]
def get_database_list(self):
	"""Return names of all databases on the connected MariaDB server as a flat list."""
	return self.sql("SHOW DATABASES", pluck=True)
def get_tables(self, cached=True):
	"""Return the list of table names, optionally served from the site cache.

	When *cached* is true and the cache holds a non-empty value, that value
	is returned directly; otherwise information_schema is queried and the
	result is written back to the cache.
	"""
	tables = frappe.cache().get_value("db_tables") if cached else None

	if not tables:
		information_schema = frappe.qb.Schema("information_schema")
		tables = (
			frappe.qb.from_(information_schema.tables)
			.select(information_schema.tables.table_name)
			.where(information_schema.tables.table_schema != "information_schema")
			.run(pluck=True)
		)
		frappe.cache().set_value("db_tables", tables)

	return tables

View file

@ -83,9 +83,9 @@ def setup_help_database(help_db_name):
def drop_user_and_database(db_name, root_login, root_password):
frappe.local.db = get_root_connection(root_login, root_password)
dbman = DbManager(frappe.local.db)
dbman.drop_database(db_name)
dbman.delete_user(db_name, host="%")
dbman.delete_user(db_name)
dbman.drop_database(db_name)
def bootstrap_database(db_name, verbose, source_sql=None):
@ -131,7 +131,7 @@ def check_database_settings():
else:
expected_variables = expected_settings_10_3_later
mariadb_variables = frappe._dict(frappe.db.sql("""show variables"""))
mariadb_variables = frappe._dict(frappe.db.sql("show variables"))
# Check each expected value vs. actuals:
result = True
for key, expected_value in expected_variables.items():
@ -142,16 +142,19 @@ def check_database_settings():
)
result = False
if not result:
site = frappe.local.site
msg = (
"Creation of your site - {x} failed because MariaDB is not properly {sep}"
"configured. If using version 10.2.x or earlier, make sure you use the {sep}"
"the Barracuda storage engine. {sep}{sep}"
"Please verify the settings above in MariaDB's my.cnf. Restart MariaDB. And {sep}"
"then run `bench new-site {x}` again.{sep2}"
""
).format(x=site, sep2="\n" * 2, sep="\n")
print_db_config(msg)
print(
(
"=" * 80 + "\n"
"Creation of your site - {x} failed because MariaDB is not properly {sep}"
"configured. If using version 10.2.x or earlier, make sure you use the {sep}"
"the Barracuda storage engine. {sep}{sep}"
"Please verify the settings above in MariaDB's my.cnf. Restart MariaDB. And {sep}"
"then run `bench new-site {x}` again.{sep2}"
""
"=" * 80
).format(x=frappe.local.site, sep2="\n" * 2, sep="\n")
)
return result
@ -173,9 +176,3 @@ def get_root_connection(root_login, root_password):
)
return frappe.local.flags.root_connection
def print_db_config(explanation):
	"""Print *explanation* framed between two 80-character separator lines."""
	separator = "=" * 80
	for line in (separator, explanation, separator):
		print(line)

View file

@ -2,12 +2,23 @@ import re
import psycopg2
import psycopg2.extensions
from psycopg2.errorcodes import STRING_DATA_RIGHT_TRUNCATION
from psycopg2.errorcodes import (
CLASS_INTEGRITY_CONSTRAINT_VIOLATION,
DEADLOCK_DETECTED,
DUPLICATE_COLUMN,
INSUFFICIENT_PRIVILEGE,
STRING_DATA_RIGHT_TRUNCATION,
UNDEFINED_COLUMN,
UNDEFINED_TABLE,
UNIQUE_VIOLATION,
)
from psycopg2.errors import SequenceGeneratorLimitExceeded, SyntaxError
from psycopg2.extensions import ISOLATION_LEVEL_REPEATABLE_READ
import frappe
from frappe.database.database import Database
from frappe.database.postgres.schema import PostgresTable
from frappe.database.utils import EmptyQueryValues, LazyDecode
from frappe.utils import cstr, get_table_name
# cast decimals as floats
@ -25,7 +36,7 @@ PG_TRANSFORM_PATTERN = re.compile(r"([=><]+)\s*([+-]?\d+)(\.0)?(?![a-zA-Z\.\d])"
FROM_TAB_PATTERN = re.compile(r"from tab([\w-]*)", flags=re.IGNORECASE)
class PostgresDatabase(Database):
class PostgresExceptionUtil:
	"""Mixin mapping psycopg2 exception classes and SQLSTATE (pgcode) values
	to the generic checks used by database-agnostic Frappe code."""

	# Generic exception aliases consulted by callers that don't know the backend.
	ProgrammingError = psycopg2.ProgrammingError
	TableMissingError = psycopg2.ProgrammingError
	OperationalError = psycopg2.OperationalError
	SQLError = psycopg2.ProgrammingError
	DataError = psycopg2.DataError
	InterfaceError = psycopg2.InterfaceError
	SequenceGeneratorLimitExceeded = SequenceGeneratorLimitExceeded

	@staticmethod
	def is_deadlocked(e):
		# getattr guards against exceptions without a pgcode attribute.
		return getattr(e, "pgcode", None) == DEADLOCK_DETECTED

	@staticmethod
	def is_timedout(e):
		# http://initd.org/psycopg/docs/extensions.html?highlight=datatype#psycopg2.extensions.QueryCanceledError
		return isinstance(e, psycopg2.extensions.QueryCanceledError)

	@staticmethod
	def is_syntax_error(e):
		return isinstance(e, SyntaxError)

	@staticmethod
	def is_table_missing(e):
		return getattr(e, "pgcode", None) == UNDEFINED_TABLE

	@staticmethod
	def is_missing_table(e):
		# Alias kept for naming symmetry with other backends.
		return PostgresDatabase.is_table_missing(e)

	@staticmethod
	def is_missing_column(e):
		return getattr(e, "pgcode", None) == UNDEFINED_COLUMN

	@staticmethod
	def is_access_denied(e):
		return getattr(e, "pgcode", None) == INSUFFICIENT_PRIVILEGE

	@staticmethod
	def cant_drop_field_or_key(e):
		return getattr(e, "pgcode", None) == CLASS_INTEGRITY_CONSTRAINT_VIOLATION

	@staticmethod
	def is_duplicate_entry(e):
		return getattr(e, "pgcode", None) == UNIQUE_VIOLATION

	@staticmethod
	def is_primary_key_violation(e):
		# Postgres names primary-key indexes "<table>_pkey" by default.
		return getattr(e, "pgcode", None) == UNIQUE_VIOLATION and "_pkey" in cstr(e.args[0])

	@staticmethod
	def is_unique_key_violation(e):
		# Non-primary unique indexes are named "<table>_<col>_key" by default.
		return getattr(e, "pgcode", None) == UNIQUE_VIOLATION and "_key" in cstr(e.args[0])

	@staticmethod
	def is_duplicate_fieldname(e):
		return getattr(e, "pgcode", None) == DUPLICATE_COLUMN

	@staticmethod
	def is_data_too_long(e):
		return getattr(e, "pgcode", None) == STRING_DATA_RIGHT_TRUNCATION
class PostgresDatabase(PostgresExceptionUtil, Database):
REGEX_CHARACTER = "~"
# NOTE; The sequence cache for postgres is per connection.
@ -40,6 +108,7 @@ class PostgresDatabase(Database):
# to the next non-cached value hence not using cache in postgres.
# ref: https://stackoverflow.com/questions/21356375/postgres-9-0-4-sequence-skipping-numbers
SEQUENCE_CACHE = 0
default_port = "5432"
def setup_type_map(self):
self.db_type = "postgres"
@ -80,6 +149,10 @@ class PostgresDatabase(Database):
"JSON": ("json", ""),
}
@property
def last_query(self):
	"""Text of the last executed query, decoded lazily.

	Wrapped in LazyDecode so the cursor's raw query bytes are only decoded
	if and when the text is actually read (e.g. for logging).
	"""
	return LazyDecode(self._cursor.query)
def get_connection(self):
conn = psycopg2.connect(
"host='{}' dbname='{}' user='{}' password='{}' port={}".format(
@ -116,9 +189,12 @@ class PostgresDatabase(Database):
return db_size[0].get("database_size")
# pylint: disable=W0221
def sql(self, query, values=(), *args, **kwargs):
def sql(self, query, values=EmptyQueryValues, *args, **kwargs):
return super().sql(modify_query(query), modify_values(values), *args, **kwargs)
def lazy_mogrify(self, *args, **kwargs) -> str:
	"""Return the last executed query text.

	Arguments are accepted for interface compatibility but ignored —
	presumably because psycopg2 has already interpolated values into the
	last executed statement; TODO confirm against the base-class contract.
	"""
	return self.last_query
def get_tables(self, cached=True):
return [
d[0]
@ -151,60 +227,6 @@ class PostgresDatabase(Database):
def is_type_datetime(code):
return code == psycopg2.DATETIME
# exception type
@staticmethod
def is_deadlocked(e):
return e.pgcode == "40P01"
@staticmethod
def is_timedout(e):
# http://initd.org/psycopg/docs/extensions.html?highlight=datatype#psycopg2.extensions.QueryCanceledError
return isinstance(e, psycopg2.extensions.QueryCanceledError)
@staticmethod
def is_syntax_error(e):
return isinstance(e, psycopg2.errors.SyntaxError)
@staticmethod
def is_table_missing(e):
return getattr(e, "pgcode", None) == "42P01"
@staticmethod
def is_missing_table(e):
return PostgresDatabase.is_table_missing(e)
@staticmethod
def is_missing_column(e):
return getattr(e, "pgcode", None) == "42703"
@staticmethod
def is_access_denied(e):
return e.pgcode == "42501"
@staticmethod
def cant_drop_field_or_key(e):
return e.pgcode.startswith("23")
@staticmethod
def is_duplicate_entry(e):
return e.pgcode == "23505"
@staticmethod
def is_primary_key_violation(e):
return getattr(e, "pgcode", None) == "23505" and "_pkey" in cstr(e.args[0])
@staticmethod
def is_unique_key_violation(e):
return getattr(e, "pgcode", None) == "23505" and "_key" in cstr(e.args[0])
@staticmethod
def is_duplicate_fieldname(e):
return e.pgcode == "42701"
@staticmethod
def is_data_too_long(e):
return e.pgcode == STRING_DATA_RIGHT_TRUNCATION
def rename_table(self, old_name: str, new_name: str) -> list | tuple:
old_name = get_table_name(old_name)
new_name = get_table_name(new_name)
@ -269,17 +291,6 @@ class PostgresDatabase(Database):
)"""
)
def create_help_table(self):
self.sql(
"""CREATE TABLE "help"(
"path" varchar(255),
"content" text,
"title" text,
"intro" text,
"full_path" text)"""
)
self.sql("""CREATE INDEX IF NOT EXISTS "help_index" ON "help" ("path")""")
def updatedb(self, doctype, meta=None):
"""
Syncs a `DocType` to the table
@ -377,8 +388,8 @@ class PostgresDatabase(Database):
as_dict=1,
)
def get_database_list(self, target):
return [d[0] for d in self.sql("SELECT datname FROM pg_database;")]
def get_database_list(self):
	"""Return names of all databases from the pg_database catalog as a flat list."""
	return self.sql("SELECT datname FROM pg_database", pluck=True)
def modify_query(query):
@ -409,7 +420,7 @@ def modify_values(values):
return value
if not values:
if not values or values == EmptyQueryValues:
return values
if isinstance(values, dict):

View file

@ -1,16 +1,21 @@
import operator
import re
from ast import literal_eval
from functools import cached_property
from typing import Any, Callable
from types import BuiltinFunctionType
from typing import TYPE_CHECKING, Any, Callable
import frappe
from frappe import _
from frappe.boot import get_additional_filters_from_hooks
from frappe.model.db_query import get_timespan_date_range
from frappe.query_builder import Criterion, Field, Order, Table
from frappe.query_builder import Criterion, Field, Order, Table, functions
from frappe.query_builder.functions import Function, SqlFunctions
TAB_PATTERN = re.compile("^tab")
WORDS_PATTERN = re.compile(r"\w+")
BRACKETS_PATTERN = re.compile(r"\(.*?\)|$")
SQL_FUNCTIONS = [sql_function.value for sql_function in SqlFunctions]
COMMA_PATTERN = re.compile(r",\s*(?![^()]*\))")
def like(key: Field, value: str) -> frappe.qb:
@ -93,7 +98,7 @@ def func_between(key: Field, value: list | tuple) -> frappe.qb:
def func_is(key, value):
"Wrapper for IS"
return Field(key).isnotnull() if value.lower() == "set" else Field(key).isnull()
return key.isnotnull() if value.lower() == "set" else key.isnull()
def func_timespan(key: Field, value: str) -> frappe.qb:
@ -143,6 +148,13 @@ def change_orderby(order: str):
return order[0], Order.desc
def literal_eval_(literal):
	"""Best-effort ``ast.literal_eval``: return the input unchanged when it
	is not a valid Python literal instead of raising."""
	try:
		result = literal_eval(literal)
	except (SyntaxError, ValueError):
		result = literal
	return result
# default operators
OPERATOR_MAP: dict[str, Callable] = {
"+": operator.add,
@ -155,6 +167,8 @@ OPERATOR_MAP: dict[str, Callable] = {
"=<": operator.le,
">=": operator.ge,
"=>": operator.ge,
"/": operator.truediv,
"*": operator.mul,
"in": func_in,
"not in": func_not_in,
"like": like,
@ -168,27 +182,24 @@ OPERATOR_MAP: dict[str, Callable] = {
}
class Query:
tables: dict = {}
class Engine:
tables: dict[str, str] = {}
@cached_property
def OPERATOR_MAP(self):
from frappe.boot import get_additional_filters_from_hooks
# default operators
all_operators = OPERATOR_MAP.copy()
# update with site-specific custom operators
additional_filters_config = get_additional_filters_from_hooks()
if additional_filters_config:
# TODO: update with site-specific custom operators / removed previous buggy implementation
if frappe.get_hooks("filters_config"):
from frappe.utils.commands import warn
warn("'filters_config' hook is not completely implemented yet in frappe.db.query engine")
for _operator, function in additional_filters_config.items():
if callable(function):
all_operators.update({_operator.casefold(): function})
elif isinstance(function, dict):
all_operators[_operator.casefold()] = frappe.get_attr(function.get("get_field"))()["operator"]
warn(
"The 'filters_config' hook used to add custom operators is not yet implemented"
" in frappe.db.query engine. Use db_query (frappe.get_list) instead."
)
return all_operators
@ -238,7 +249,7 @@ class Query:
Returns:
conditions (frappe.qb): frappe.qb object
"""
if kwargs.get("orderby"):
if kwargs.get("orderby") and kwargs.get("orderby") != "KEEP_DEFAULT_ORDERING":
orderby = kwargs.get("orderby")
if isinstance(orderby, str) and len(orderby.split()) > 1:
for ordby in orderby.split(","):
@ -250,6 +261,7 @@ class Query:
if kwargs.get("limit"):
conditions = conditions.limit(kwargs.get("limit"))
conditions = conditions.offset(kwargs.get("offset", 0))
if kwargs.get("distinct"):
conditions = conditions.distinct()
@ -257,6 +269,9 @@ class Query:
if kwargs.get("for_update"):
conditions = conditions.for_update()
if kwargs.get("groupby"):
conditions = conditions.groupby(kwargs.get("groupby"))
return conditions
def misc_query(self, table: str, filters: list | tuple = None, **kwargs):
@ -306,6 +321,10 @@ class Query:
conditions = self.add_conditions(conditions, **kwargs)
return conditions
for key, value in filters.items():
if isinstance(value, bool):
filters.update({key: str(int(value))})
for key in filters:
value = filters.get(key)
_operator = self.OPERATOR_MAP["="]
@ -315,7 +334,8 @@ class Query:
continue
if isinstance(value, (list, tuple)):
_operator = self.OPERATOR_MAP[value[0].casefold()]
conditions = conditions.where(_operator(Field(key), value[1]))
_value = value[1] if value[1] else ("",)
conditions = conditions.where(_operator(Field(key), _value))
else:
if value is not None:
conditions = conditions.where(_operator(Field(key), value))
@ -352,7 +372,138 @@ class Query:
return criterion
def get_sql(
def get_function_object(self, field: str) -> "Function":
	"""Expects field to look like 'SUM(*)' or 'name' or something similar. Returns PyPika Function object"""
	# Function name is everything before the first "(", capitalized to match
	# the attribute names on the pypika functions module (e.g. "sum(x)" -> "Sum").
	func = field.split("(", maxsplit=1)[0].capitalize()
	args_start, args_end = len(func) + 1, field.index(")")
	args = field[args_start:args_end].split(",")
	# Optional trailing alias, e.g. "sum(x) as total".
	_, alias = field.split(" as ") if " as " in field else (None, None)
	# "*" arguments (e.g. COUNT(*)) are passed through untouched.
	to_cast = "*" not in args

	_args = []
	for arg in args:
		# Literals ("1", "'x'") are parsed; non-literals come back unchanged as str.
		initial_fields = literal_eval_(arg.strip())
		if to_cast:
			has_primitive_operator = False
			for _operator in OPERATOR_MAP.keys():
				if _operator in initial_fields:
					operator_mapping = OPERATOR_MAP[_operator]
					# Only perform this if operator is of primitive type.
					if isinstance(operator_mapping, BuiltinFunctionType):
						has_primitive_operator = True
						# e.g. "a*b" -> operator.mul(Field("a"), Field("b"))
						field = operator_mapping(
							*map(lambda field: Field(field.strip()), arg.split(_operator)),
						)
			# NOTE(review): reuses/overwrites the outer ``field`` parameter name —
			# confirm this shadowing is intended.
			field = Field(initial_fields) if not has_primitive_operator else field
		else:
			field = initial_fields
		_args.append(field)

	try:
		return getattr(functions, func)(*_args, alias=alias or None)
	except AttributeError:
		# Fall back for functions not present in `SqlFunctions``
		return Function(func, *_args, alias=alias or None)
def function_objects_from_string(self, fields):
	"""Split a comma-separated ``fields`` string and return PyPika function
	objects for any SQL function calls found.

	COMMA_PATTERN splits on top-level commas only, so arguments of nested
	calls like ``count(a, b)`` are kept together.
	"""
	# Comprehension instead of list(map(lambda str: ...)): the original
	# lambda parameter shadowed the builtin ``str``.
	parts = [part.strip() for part in COMMA_PATTERN.split(fields)]
	return self.function_objects_from_list(fields=parts)
def function_objects_from_list(self, fields):
	"""Return PyPika Function objects for every entry of ``fields`` that
	contains an SQL function call (e.g. ``"count(name)"``).

	Entries that are already pypika Criterion objects are skipped.
	"""
	# Named ``function_strings`` so it doesn't shadow the imported
	# ``functions`` module used by get_function_object().
	function_strings = []
	for field in fields:
		field = field.casefold() if isinstance(field, str) else field
		if not issubclass(type(field), Criterion):
			# Either a known "func(" prefix, or any parenthesised expression.
			if any(f"{func}(" in field for func in SQL_FUNCTIONS) or "(" in field:
				function_strings.append(field)
	return [self.get_function_object(function) for function in function_strings]
def remove_string_functions(self, fields, function_objects):
	"""Remove string functions from fields which have already been converted to function objects"""
	for function in function_objects:
		if isinstance(fields, str):
			# Strip the alias first so a dangling " as <alias>" doesn't survive.
			if function.alias:
				fields = fields.replace(" as " + function.alias.casefold(), "")
			# Drop the function name, then any leftover "(...)" argument list.
			fields = BRACKETS_PATTERN.sub("", fields.replace(function.name.casefold(), ""))
			# Check if only comma is left in fields after stripping functions.
			if "," in fields and (len(fields.strip()) == 1):
				fields = ""
		else:
			updated_fields = []
			for field in fields:
				if isinstance(field, str):
					if function.alias:
						field = field.replace(" as " + function.alias.casefold(), "")
					field = (
						BRACKETS_PATTERN.sub("", field).strip().casefold().replace(function.name.casefold(), "")
					)
				updated_fields.append(field)
			# Drop entries that became empty after stripping.
			fields = [field for field in updated_fields if field]
	return fields
def set_fields(self, fields, **kwargs):
	"""Normalize ``fields`` (str, list/tuple/set, or Criterion) into the
	objects to pass to ``select()``.

	``pluck`` in kwargs overrides ``fields``; the default selection is "name".
	SQL function calls embedded in strings are converted to PyPika Function
	objects and appended at the end.
	"""
	fields = kwargs.get("pluck") if kwargs.get("pluck") else fields or "name"

	# A list containing None (and no Field objects) means "select nothing".
	if isinstance(fields, list) and None in fields and Field not in fields:
		return None

	function_objects = []
	is_list = isinstance(fields, (list, tuple, set))

	# Unwrap single-element containers and fall through to the scalar path.
	if is_list and len(fields) == 1:
		fields = fields[0]
		is_list = False

	if is_list:
		function_objects += self.function_objects_from_list(fields=fields)

	is_str = isinstance(fields, str)

	if is_str:
		fields = fields.casefold()
		function_objects += self.function_objects_from_string(fields=fields)

	# Function calls are now carried by function_objects; drop their string forms.
	fields = self.remove_string_functions(fields, function_objects)

	# A remaining comma-separated string becomes a list of field names.
	if is_str and "," in fields:
		fields = [field.replace(" ", "") if "as" not in field else field for field in fields.split(",")]
		is_list, is_str = True, False

	if is_str:
		if fields == "*":
			return fields
		# Single "field as alias" string.
		if " as " in fields:
			fields, reference = fields.split(" as ")
			fields = Field(fields).as_(reference)

	if not is_str and fields:
		# Already a pypika expression — pass through unchanged.
		if issubclass(type(fields), Criterion):
			return fields

		updated_fields = []
		if "*" in fields:
			return fields
		for field in fields:
			if not isinstance(field, Criterion) and field:
				if " as " in field:
					field, reference = field.split(" as ")
					updated_fields.append(Field(field.strip()).as_(reference))
				else:
					updated_fields.append(Field(field))
		fields = updated_fields

	# Need to check instance again since fields modified.
	if not isinstance(fields, (list, tuple, set)):
		fields = [fields] if fields else []

	fields.extend(function_objects)
	return fields
def get_query(
self,
table: str,
fields: list | tuple,
@ -362,15 +513,20 @@ class Query:
# Clean up state before each query
self.tables = {}
criterion = self.build_conditions(table, filters, **kwargs)
fields = self.set_fields(kwargs.get("field_objects") or fields, **kwargs)
join = kwargs.get("join").replace(" ", "_") if kwargs.get("join") else "left_join"
if len(self.tables) > 1:
primary_table = self.tables[table]
del self.tables[table]
for table_object in self.tables.values():
criterion = criterion.left_join(table_object).on(table_object.parent == primary_table.name)
criterion = getattr(criterion, join)(table_object).on(
table_object.parent == primary_table.name
)
if isinstance(fields, (list, tuple)):
query = criterion.select(*kwargs.get("field_objects", fields))
query = criterion.select(*fields)
elif isinstance(fields, Criterion):
query = criterion.select(fields)

View file

@ -57,12 +57,17 @@ def create_sequence(
def get_next_val(doctype_name: str, slug: str = "_id_seq") -> int:
return db.multisql(
{
"postgres": f"select nextval('\"{scrub(doctype_name + slug)}\"')",
"mariadb": f"select nextval(`{scrub(doctype_name + slug)}`)",
}
)[0][0]
sequence_name = scrub(f"{doctype_name}{slug}")
if db.db_type == "postgres":
sequence_name = f"'\"{sequence_name}\"'"
elif db.db_type == "mariadb":
sequence_name = f"`{sequence_name}`"
try:
return db.sql(f"SELECT nextval({sequence_name})")[0][0]
except IndexError:
raise db.SequenceGeneratorLimitExceeded
def set_next_val(

54
frappe/database/utils.py Normal file
View file

@ -0,0 +1,54 @@
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and Contributors
# License: MIT. See LICENSE
from functools import cached_property
from types import NoneType
import frappe
from frappe.query_builder.builder import MariaDB, Postgres
Query = str | MariaDB | Postgres
QueryValues = tuple | list | dict | NoneType
EmptyQueryValues = object()
FallBackDateTimeStr = "0001-01-01 00:00:00.000000"
def is_query_type(query: str, query_type: str | tuple[str]) -> bool:
return query.lstrip().split(maxsplit=1)[0].lower().startswith(query_type)
class LazyString:
	"""A string whose value is computed lazily, on first use.

	Subclasses implement ``_setup`` to produce the final string; the result
	is cached via ``functools.cached_property`` so the (potentially
	expensive) computation runs at most once per instance.
	"""

	def _setup(self) -> str:
		"""Compute and return the string value. Must be overridden.

		(Return annotation fixed: the original declared ``-> None`` even
		though subclasses return the computed str.)
		"""
		raise NotImplementedError

	@cached_property
	def value(self) -> str:
		# cached_property guarantees _setup() is invoked only on first access.
		return self._setup()

	def __str__(self) -> str:
		return self.value

	def __repr__(self) -> str:
		return f"'{self.value}'"
class LazyDecode(LazyString):
	"""Hold raw bytes and decode them to str only when the value is first used."""

	# NOTE(review): ineffective — LazyString defines no __slots__, so instances
	# still get a __dict__ (which cached_property relies on anyway).
	__slots__ = ()

	def __init__(self, value: bytes) -> None:
		self._value = value

	def _setup(self) -> str:
		# Default (UTF-8) decode; result is cached by LazyString.value.
		return self._value.decode()
class LazyMogrify(LazyString):
	"""Interpolate values into a query lazily — only when the text is needed."""

	# NOTE(review): ineffective — LazyString defines no __slots__, so instances
	# still carry a __dict__.
	__slots__ = ()

	def __init__(self, query, values) -> None:
		self.query = query
		self.values = values

	def _setup(self) -> str:
		# Delegates parameter binding to the active database connection.
		return frappe.db.mogrify(self.query, self.values)

View file

@ -85,18 +85,19 @@ def get_user_permissions(user=None):
def get_defaults(user=None):
globald = get_defaults_for()
global_defaults = get_defaults_for()
if not user:
user = frappe.session.user if frappe.session else "Guest"
if user:
userd = {}
userd.update(get_defaults_for(user))
userd.update({"user": user, "owner": user})
globald.update(userd)
if not user:
return global_defaults
return globald
defaults = global_defaults.copy()
defaults.update(get_defaults_for(user))
defaults.update(user=user, owner=user)
return defaults
def clear_user_default(key, user=None):
@ -241,8 +242,4 @@ def get_defaults_for(parent="__default"):
def _clear_cache(parent):
if parent in common_default_keys:
frappe.clear_cache()
else:
clear_notifications(user=parent)
frappe.clear_cache(user=parent)
frappe.clear_cache(user=parent if parent not in common_default_keys else None)

View file

@ -200,7 +200,7 @@ def get_cards_for_user(doctype, txt, searchfield, start, page_len, filters):
if txt:
search_conditions = [numberCard[field].like(f"%{txt}%") for field in searchfields]
condition_query = frappe.db.query.build_conditions(doctype, filters)
condition_query = frappe.qb.engine.build_conditions(doctype, filters)
return (
condition_query.select(numberCard.name, numberCard.label, numberCard.document_type)

View file

@ -4,45 +4,18 @@
import frappe
from frappe.deferred_insert import deferred_insert as _deferred_insert
from frappe.model.document import Document
from frappe.query_builder import DocType, Interval
from frappe.query_builder.functions import Count, Now
class RouteHistory(Document):
@staticmethod
def clear_old_logs(days=30):
from frappe.query_builder import Interval
from frappe.query_builder.functions import Now
table = frappe.qb.DocType("Route History")
frappe.db.delete(table, filters=(table.modified < (Now() - Interval(days=days))))
def flush_old_route_records():
"""Deletes all route records except last 500 records per user"""
records_to_keep_limit = 500
RouteHistory = DocType("Route History")
users = (
frappe.qb.from_(RouteHistory)
.select(RouteHistory.user)
.groupby(RouteHistory.user)
.having(Count(RouteHistory.name) > records_to_keep_limit)
).run(pluck=True)
for user in users:
last_record_to_keep = frappe.get_all(
"Route History",
filters={"user": user},
limit_start=500,
fields=["modified"],
order_by="modified desc",
limit=1,
)
frappe.db.delete(
"Route History",
{"modified": ("<=", last_record_to_keep[0].modified), "user": user},
)
@frappe.whitelist()
def deferred_insert(routes):
routes = [

View file

@ -36,7 +36,7 @@ def get_group_by_count(doctype: str, current_filters: str, field: str) -> list[d
ToDo = DocType("ToDo")
User = DocType("User")
count = Count("*").as_("count")
filtered_records = frappe.db.query.build_conditions(doctype, current_filters).select("name")
filtered_records = frappe.qb.engine.build_conditions(doctype, current_filters).select("name")
return (
frappe.qb.from_(ToDo)

View file

@ -225,7 +225,7 @@ def parse_json(data):
if isinstance(data.get("or_filters"), str):
data["or_filters"] = json.loads(data["or_filters"])
if isinstance(data.get("fields"), str):
data["fields"] = json.loads(data["fields"])
data["fields"] = ["*"] if data["fields"] == "*" else json.loads(data["fields"])
if isinstance(data.get("docstatus"), str):
data["docstatus"] = json.loads(data["docstatus"])
if isinstance(data.get("save_user_settings"), str):
@ -684,8 +684,7 @@ def build_match_conditions(doctype, user=None, as_condition=True):
)
if as_condition:
return match_conditions.replace("%", "%%")
else:
return match_conditions
return match_conditions
def get_filters_cond(

View file

@ -10,27 +10,26 @@ def sendmail_to_system_managers(subject, content):
@frappe.whitelist()
def get_contact_list(txt, page_length=20):
def get_contact_list(txt, page_length=20) -> list[dict]:
"""Returns contacts (from autosuggest)"""
cached_contacts = get_cached_contacts(txt)
if cached_contacts:
if cached_contacts := get_cached_contacts(txt):
return cached_contacts[:page_length]
match_conditions = build_match_conditions("Contact")
match_conditions = f"and {match_conditions}" if match_conditions else ""
reportview_conditions = build_match_conditions("Contact")
match_conditions = f"and {reportview_conditions}" if reportview_conditions else ""
out = frappe.db.sql(
"""select email_id as value,
f"""select email_id as value,
concat(first_name, ifnull(concat(' ',last_name), '' )) as description
from tabContact
where name like %(txt)s or email_id like %(txt)s
%(condition)s
{match_conditions}
limit %(page_length)s""",
{"txt": "%" + txt + "%", "condition": match_conditions, "page_length": page_length},
{"txt": f"%{txt}%", "page_length": page_length},
as_dict=True,
)
out = filter(None, out)
out = list(filter(None, out))
update_contact_cache(out)

View file

@ -1,6 +1,7 @@
frappe.email_defaults = {
"GMail": {
"email_server": "imap.gmail.com",
"incoming_port": 993,
"use_ssl": 1,
"enable_outgoing": 1,
"smtp_server": "smtp.gmail.com",
@ -66,6 +67,34 @@ frappe.email_defaults_pop = {
};
// Request the OAuth authorization URL for this Email Account from the
// server endpoint and redirect the current window to it.
function oauth_access(frm) {
	return frappe.call({
		method: "frappe.email.oauth.oauth_access",
		args: {
			"email_account": frm.doc.name,
			"service": frm.doc.service || ""
		},
		callback: function(r) {
			if (!r.exc) {
				// navigate in the same tab to the provider's consent page
				window.open(r.message.url, "_self");
			}
		}
	});
}
// Prefill `field` on new (unsaved) Email Accounts that have no value yet,
// using the site-wide max file size fetched from the server.
function set_default_max_attachment_size(frm, field) {
	if (frm.doc.__islocal && !frm.doc[field]) {
		frappe.call({
			method: "frappe.core.api.file.get_max_file_size",
			callback: function(r) {
				if (!r.exc) {
					// presumably bytes from the server; converted to MB — TODO confirm
					frm.set_value(field, Number(r.message)/(1024*1024));
				}
			},
		});
	}
}
frappe.ui.form.on("Email Account", {
service: function(frm) {
$.each(frappe.email_defaults[frm.doc.service], function(key, value) {
@ -77,6 +106,7 @@ frappe.ui.form.on("Email Account", {
});
}
frm.events.show_gmail_message_for_less_secure_apps(frm);
frm.events.toggle_auth_method(frm);
},
use_imap: function(frm) {
@ -93,8 +123,6 @@ frappe.ui.form.on("Email Account", {
},
enable_incoming: function(frm) {
frm.doc.no_remaining = null; //perform full sync
//frm.set_df_property("append_to", "reqd", frm.doc.enable_incoming);
frm.trigger("warn_autoreply_on_incoming");
},
@ -107,6 +135,12 @@ frappe.ui.form.on("Email Account", {
},
onload: function(frm) {
if (frappe.utils.get_query_params().successful_authorization === '1') {
frappe.show_alert(__("Successfully Authorized"));
// FIXME: find better alternative
window.history.replaceState(null, "", window.location.pathname);
}
frm.set_df_property("append_to", "only_select", true);
frm.set_query("append_to", "frappe.email.doctype.email_account.email_account.get_append_to");
frm.set_query("append_to", "imap_folder", function() {
@ -118,6 +152,8 @@ frappe.ui.form.on("Email Account", {
frm.add_child("imap_folder", {"folder_name": "INBOX"});
frm.refresh_field("imap_folder");
}
frm.toggle_display(['auth_method'], frm.doc.service === "GMail");
set_default_max_attachment_size(frm, "attachment_limit");
},
refresh: function(frm) {
@ -125,6 +161,7 @@ frappe.ui.form.on("Email Account", {
frm.events.enable_incoming(frm);
frm.events.notify_if_unreplied(frm);
frm.events.show_gmail_message_for_less_secure_apps(frm);
frm.events.show_oauth_authorization_message(frm);
if (frappe.route_flags.delete_user_from_locals && frappe.route_flags.linked_user) {
delete frappe.route_flags.delete_user_from_locals;
@ -132,9 +169,24 @@ frappe.ui.form.on("Email Account", {
}
},
after_save(frm) {
if (frm.doc.auth_method === "OAuth" && !frm.doc.refresh_token) {
oauth_access(frm);
}
},
toggle_auth_method: function(frm) {
if (frm.doc.service !== "GMail") {
frm.toggle_display(['auth_method'], false);
frm.doc.auth_method = "Basic";
} else {
frm.toggle_display(['auth_method'], true);
}
},
show_gmail_message_for_less_secure_apps: function(frm) {
frm.dashboard.clear_headline();
let msg = __("GMail will only work if you enable 2-step authentication and use app-specific password.");
let msg = __("GMail will only work if you enable 2-step authentication and use app-specific password OR use OAuth.");
let cta = __("Read the step by step guide here.");
msg += ` <a target="_blank" href="https://docs.erpnext.com/docs/v13/user/manual/en/setting-up/email/email_account_setup_with_gmail">${cta}</a>`;
if (frm.doc.service==="GMail") {
@ -142,6 +194,18 @@ frappe.ui.form.on("Email Account", {
}
},
show_oauth_authorization_message(frm) {
if (frm.doc.auth_method === "OAuth" && !frm.doc.refresh_token) {
let msg = __('OAuth has been enabled but not authorised. Please use "Authorise API Access" button to do the same.')
frm.dashboard.clear_headline();
frm.dashboard.set_headline_alert(msg, "yellow");
}
},
authorize_api_access: function(frm) {
oauth_access(frm);
},
email_id:function(frm) {
//pull domain and if no matching domain go create one
frm.events.update_domain(frm);

View file

@ -14,10 +14,14 @@
"domain",
"service",
"authentication_column",
"auth_method",
"authorize_api_access",
"password",
"awaiting_password",
"ascii_encode_password",
"column_break_10",
"refresh_token",
"access_token",
"login_id_is_different",
"login_id",
"mailbox_settings",
@ -44,9 +48,9 @@
"send_notification_to",
"outgoing_mail_settings",
"enable_outgoing",
"smtp_server",
"use_tls",
"use_ssl_for_outgoing",
"smtp_server",
"smtp_port",
"column_break_38",
"default_outgoing",
@ -66,7 +70,6 @@
"brand_logo",
"uidvalidity",
"uidnext",
"no_remaining",
"no_failed"
],
"fields": [
@ -79,7 +82,8 @@
"in_list_view": 1,
"label": "Email Address",
"options": "Email",
"reqd": 1
"reqd": 1,
"unique": 1
},
{
"default": "0",
@ -87,7 +91,7 @@
"fieldtype": "Check",
"hide_days": 1,
"hide_seconds": 1,
"label": "Use different login"
"label": "Use different Email ID"
},
{
"depends_on": "login_id_is_different",
@ -95,9 +99,10 @@
"fieldtype": "Data",
"hide_days": 1,
"hide_seconds": 1,
"label": "Email Login ID"
"label": "Alternative Email ID"
},
{
"depends_on": "eval: doc.auth_method === \"Basic\"",
"fieldname": "password",
"fieldtype": "Password",
"hide_days": 1,
@ -106,6 +111,7 @@
},
{
"default": "0",
"depends_on": "eval: doc.auth_method === \"Basic\"",
"fieldname": "awaiting_password",
"fieldtype": "Check",
"hide_days": 1,
@ -114,6 +120,7 @@
},
{
"default": "0",
"depends_on": "eval: doc.auth_method === \"Basic\"",
"fieldname": "ascii_encode_password",
"fieldtype": "Check",
"hide_days": 1,
@ -182,7 +189,7 @@
"fieldtype": "Data",
"hide_days": 1,
"hide_seconds": 1,
"label": "Email Server"
"label": "Incoming Server"
},
{
"default": "0",
@ -304,7 +311,7 @@
"fieldtype": "Data",
"hide_days": 1,
"hide_seconds": 1,
"label": "SMTP Server"
"label": "Outgoing Server"
},
{
"default": "0",
@ -464,15 +471,6 @@
"label": "UIDNEXT",
"no_copy": 1
},
{
"fieldname": "no_remaining",
"fieldtype": "Data",
"hidden": 1,
"hide_days": 1,
"hide_seconds": 1,
"label": "No of emails remaining to be synced",
"no_copy": 1
},
{
"fieldname": "no_failed",
"fieldtype": "Int",
@ -524,7 +522,7 @@
"fieldtype": "Check",
"hide_days": 1,
"hide_seconds": 1,
"label": "Use SSL for Outgoing"
"label": "Use SSL"
},
{
"default": "1",
@ -576,12 +574,39 @@
"fieldname": "section_break_25",
"fieldtype": "Section Break",
"label": "IMAP Details"
},
{
"depends_on": "eval: doc.service === \"GMail\" && doc.auth_method === \"OAuth\" && !doc.__islocal && !doc.__unsaved",
"fieldname": "authorize_api_access",
"fieldtype": "Button",
"label": "Authorize API Access"
},
{
"fieldname": "refresh_token",
"fieldtype": "Small Text",
"hidden": 1,
"label": "Refresh Token",
"read_only": 1
},
{
"fieldname": "access_token",
"fieldtype": "Small Text",
"hidden": 1,
"label": "Access Token",
"read_only": 1
},
{
"default": "Basic",
"fieldname": "auth_method",
"fieldtype": "Select",
"label": "Method",
"options": "Basic\nOAuth"
}
],
"icon": "fa fa-inbox",
"index_web_pages_for_search": 1,
"links": [],
"modified": "2021-11-30 09:03:25.728637",
"modified": "2022-07-13 13:05:45.445572",
"modified_by": "Administrator",
"module": "Email",
"name": "Email Account",
@ -603,5 +628,6 @@
],
"sort_field": "modified",
"sort_order": "DESC",
"states": [],
"track_changes": 1
}
}

View file

@ -20,6 +20,7 @@ from frappe.utils import cint, comma_or, cstr, parse_addr, validate_email_addres
from frappe.utils.background_jobs import enqueue, get_jobs
from frappe.utils.error import raise_error_on_no_output
from frappe.utils.jinja import render_template
from frappe.utils.password import decrypt, encrypt
from frappe.utils.user import get_system_managers
@ -63,6 +64,7 @@ class EmailAccount(Document):
def validate(self):
"""Validate Email Address and check POP3/IMAP and SMTP connections is enabled."""
if self.email_id:
validate_email_address(self.email_id, True)
@ -76,25 +78,25 @@ class EmailAccount(Document):
if self.enable_incoming and self.use_imap and len(self.imap_folder) <= 0:
frappe.throw(_("You need to set one IMAP folder for {0}").format(frappe.bold(self.email_id)))
duplicate_email_account = frappe.get_all(
"Email Account", filters={"email_id": self.email_id, "name": ("!=", self.name)}
)
if duplicate_email_account:
frappe.throw(
_("Email ID must be unique, Email Account already exists for {0}").format(
frappe.bold(self.email_id)
)
)
if frappe.local.flags.in_patch or frappe.local.flags.in_test:
return
if (
not self.awaiting_password
and not frappe.local.flags.in_install
and not frappe.local.flags.in_patch
):
if self.password or self.smtp_server in ("127.0.0.1", "localhost"):
use_oauth = self.auth_method == "OAuth"
if getattr(self, "service", "") != "GMail" and use_oauth:
self.auth_method = "Basic"
use_oauth = False
if use_oauth:
# no need for awaiting password for oauth
self.awaiting_password = 0
elif self.refresh_token:
# clear access & refresh token
self.refresh_token = self.access_token = None
if not frappe.local.flags.in_install and not self.awaiting_password:
if self.refresh_token or self.password or self.smtp_server in ("127.0.0.1", "localhost"):
if self.enable_incoming:
self.get_incoming_server()
self.no_failed = 0
@ -103,7 +105,8 @@ class EmailAccount(Document):
self.validate_smtp_conn()
else:
if self.enable_incoming or (self.enable_outgoing and not self.no_smtp_authentication):
frappe.throw(_("Password is required or select Awaiting Password"))
if not use_oauth:
frappe.throw(_("Password is required or select Awaiting Password"))
if self.notify_if_unreplied:
if not self.send_notification_to:
@ -111,11 +114,12 @@ class EmailAccount(Document):
for e in self.get_unreplied_notification_emails():
validate_email_address(e, True)
for folder in self.imap_folder:
if self.enable_incoming and folder.append_to:
valid_doctypes = [d[0] for d in get_append_to()]
if folder.append_to not in valid_doctypes:
frappe.throw(_("Append To can be one of {0}").format(comma_or(valid_doctypes)))
if self.enable_incoming:
for folder in self.imap_folder:
if folder.append_to:
valid_doctypes = [d[0] for d in get_append_to()]
if folder.append_to not in valid_doctypes:
frappe.throw(_("Append To can be one of {0}").format(comma_or(valid_doctypes)))
def validate_smtp_conn(self):
if not self.smtp_server:
@ -155,6 +159,7 @@ class EmailAccount(Document):
awaiting_password=self.awaiting_password,
email_id=self.email_id,
enable_outgoing=self.enable_outgoing,
used_oauth=self.auth_method == "OAuth",
)
def there_must_be_only_one_default(self):
@ -204,10 +209,14 @@ class EmailAccount(Document):
"host": self.email_server,
"use_ssl": self.use_ssl,
"username": getattr(self, "login_id", None) or self.email_id,
"service": getattr(self, "service", ""),
"use_imap": self.use_imap,
"email_sync_rule": email_sync_rule,
"incoming_port": get_port(self),
"initial_sync_count": self.initial_sync_count or 100,
"use_oauth": self.auth_method == "OAuth",
"refresh_token": decrypt(self.refresh_token) if self.refresh_token else None,
"access_token": decrypt(self.access_token) if self.access_token else None,
}
)
@ -274,7 +283,9 @@ class EmailAccount(Document):
@property
def _password(self):
raise_exception = not (self.no_smtp_authentication or frappe.flags.in_test)
raise_exception = not (
self.auth_method == "OAuth" or self.no_smtp_authentication or frappe.flags.in_test
)
return self.get_password(raise_exception=raise_exception)
@property
@ -393,6 +404,9 @@ class EmailAccount(Document):
"default": 0,
},
"name": {"conf_names": ("email_sender_name",), "default": "Frappe"},
"auth_method": {"conf_names": ("auth_method"), "default": "Basic"},
"access_token": {"conf_names": ("mail_access_token")},
"refresh_token": {"conf_names": ("mail_refresh_token")},
"from_site_config": {"default": True},
}
@ -400,17 +414,27 @@ class EmailAccount(Document):
for doc_field_name, d in field_to_conf_name_map.items():
conf_names, default = d.get("conf_names") or [], d.get("default")
value = [frappe.conf.get(k) for k in conf_names if frappe.conf.get(k)]
account_details[doc_field_name] = (value and value[0]) or default
if doc_field_name in ("refresh_token", "access_token"):
account_details[doc_field_name] = value and encrypt(value[0])
else:
account_details[doc_field_name] = (value and value[0]) or default
return account_details
def sendmail_config(self):
return {
"email_account": self.name,
"server": self.smtp_server,
"port": cint(self.smtp_port),
"login": getattr(self, "login_id", None) or self.email_id,
"password": self._password,
"use_ssl": cint(self.use_ssl_for_outgoing),
"use_tls": cint(self.use_tls),
"service": getattr(self, "service", ""),
"use_oauth": self.auth_method == "OAuth",
"refresh_token": decrypt(self.refresh_token) if self.refresh_token else None,
"access_token": decrypt(self.access_token) if self.access_token else None,
}
def get_smtp_server(self):
@ -494,7 +518,7 @@ class EmailAccount(Document):
seen_status = messages.get("seen_status", {}).get(uid)
if self.email_sync_option != "UNSEEN" or seen_status != "SEEN":
# only append the emails with status != 'SEEN' if sync option is set to 'UNSEEN'
mails.append(InboundMail(message, self, uid, seen_status, append_to))
mails.append(InboundMail(message, self, frappe.safe_decode(uid), seen_status, append_to))
if not self.enable_incoming:
return []
@ -771,15 +795,25 @@ def notify_unreplied():
def pull(now=False):
"""Will be called via scheduler, pull emails from all enabled Email accounts."""
if frappe.cache().get_value("workers:no-internet") == True:
if test_internet():
frappe.cache().set_value("workers:no-internet", False)
else:
return
queued_jobs = get_jobs(site=frappe.local.site, key="job_name")[frappe.local.site]
for email_account in frappe.get_list(
"Email Account", filters={"enable_incoming": 1, "awaiting_password": 0}
):
doctype = frappe.qb.DocType("Email Account")
email_accounts = (
frappe.qb.from_(doctype)
.select(doctype.name)
.where(doctype.enable_incoming == 1)
.where(
(doctype.awaiting_password == 0)
| ((doctype.auth_method == "OAuth") & (doctype.refresh_token.isnotnull()))
)
.run(as_dict=1)
)
for email_account in email_accounts:
if now:
pull_from_email_account(email_account.name)
@ -787,6 +821,7 @@ def pull(now=False):
# job_name is used to prevent duplicates in queue
job_name = f"pull_from_email_account|{email_account.name}"
queued_jobs = get_jobs(site=frappe.local.site, key="job_name")[frappe.local.site]
if job_name not in queued_jobs:
enqueue(
pull_from_email_account,
@ -824,7 +859,9 @@ def get_max_email_uid(email_account):
return max_uid
def setup_user_email_inbox(email_account, awaiting_password, email_id, enable_outgoing):
def setup_user_email_inbox(
email_account, awaiting_password, email_id, enable_outgoing, used_oauth
):
"""setup email inbox for user"""
from frappe.core.doctype.user.user import ask_pass_update
@ -835,6 +872,7 @@ def setup_user_email_inbox(email_account, awaiting_password, email_id, enable_ou
row.email_id = email_id
row.email_account = email_account
row.awaiting_password = awaiting_password or 0
row.used_oauth = used_oauth or 0
row.enable_outgoing = enable_outgoing or 0
user.save(ignore_permissions=True)
@ -867,8 +905,10 @@ def setup_user_email_inbox(email_account, awaiting_password, email_id, enable_ou
if update_user_email_settings:
UserEmail = frappe.qb.DocType("User Email")
frappe.qb.update(UserEmail).set(UserEmail.awaiting_password, (awaiting_password or 0)).set(
UserEmail.enable_outgoing, enable_outgoing
).where(UserEmail.email_account == email_account).run()
UserEmail.enable_outgoing, (enable_outgoing or 0)
).set(UserEmail.used_oauth, (used_oauth or 0)).where(
UserEmail.email_account == email_account
).run()
else:
users = " and ".join([frappe.bold(user.get("name")) for user in user_names])
@ -893,10 +933,10 @@ def remove_user_email_inbox(email_account):
doc.save(ignore_permissions=True)
@frappe.whitelist(allow_guest=False)
def set_email_password(email_account, user, password):
@frappe.whitelist()
def set_email_password(email_account, password):
account = frappe.get_doc("Email Account", email_account)
if account.awaiting_password:
if account.awaiting_password and not account.auth_method == "OAuth":
account.awaiting_password = 0
account.password = password
try:

View file

@ -18,7 +18,6 @@
"unreplied_for_mins": 20,
"send_notification_to": "test_unreplied@example.com",
"pop3_server": "pop.test.example.com",
"no_remaining":"0",
"append_to": "ToDo",
"imap_folder": [{"folder_name": "INBOX", "append_to": "ToDo"}, {"folder_name": "Test Folder", "append_to": "Communication"}],
"track_email_status": 1

View file

@ -27,6 +27,7 @@ from frappe.utils import (
get_hook_method,
get_string_between,
nowdate,
sbool,
split_emails,
)
@ -110,8 +111,11 @@ class EmailQueue(Document):
return self.status in ["Not Sent", "Partially Sent"]
def can_send_now(self):
hold_queue = cint(frappe.defaults.get_defaults().get("hold_queue")) == 1
if frappe.are_emails_muted() or not self.is_to_be_sent() or hold_queue:
if (
frappe.are_emails_muted()
or not self.is_to_be_sent()
or cint(frappe.db.get_default("suspend_email_queue")) == 1
):
return False
return True
@ -359,6 +363,8 @@ class SendMailContext:
@frappe.whitelist()
def retry_sending(name):
doc = frappe.get_doc("Email Queue", name)
doc.check_permission()
if doc and (doc.status == "Error" or doc.status == "Partially Errored"):
doc.status = "Not Sent"
for d in doc.recipients:
@ -371,9 +377,16 @@ def retry_sending(name):
def send_now(name):
record = EmailQueue.find(name)
if record:
record.check_permission()
record.send()
@frappe.whitelist()
def toggle_sending(enable):
frappe.only_for("System Manager")
frappe.db.set_default("suspend_email_queue", 0 if sbool(enable) else 1)
def on_doctype_update():
"""Add index in `tabCommunication` for `(reference_doctype, reference_name)`"""
frappe.db.add_index(

View file

@ -3,27 +3,37 @@ frappe.listview_settings['Email Queue'] = {
var colour = {'Sent': 'green', 'Sending': 'blue', 'Not Sent': 'grey', 'Error': 'red', 'Expired': 'orange'};
return [__(doc.status), colour[doc.status], "status,=," + doc.status];
},
refresh: function(doclist){
if (has_common(frappe.user_roles, ["Administrator", "System Manager"])){
if (cint(frappe.defaults.get_default("hold_queue"))){
doclist.page.clear_inner_toolbar()
doclist.page.add_inner_button(__("Resume Sending"), function() {
frappe.defaults.set_default("hold_queue", 0);
cur_list.refresh();
})
} else {
doclist.page.clear_inner_toolbar()
doclist.page.add_inner_button(__("Suspend Sending"), function() {
frappe.defaults.set_default("hold_queue", 1)
cur_list.refresh();
})
}
}
},
onload: function(listview) {
refresh: show_toggle_sending_button,
onload: function(list_view) {
frappe.require("logtypes.bundle.js", () => {
frappe.utils.logtypes.show_log_retention_message(cur_list.doctype);
frappe.utils.logtypes.show_log_retention_message(list_view.doctype);
})
}
}
};
// Render a Suspend/Resume Sending button on the Email Queue list view for
// Administrators/System Managers, toggling the `suspend_email_queue`
// system default. Re-renders itself after each toggle to flip the label.
function show_toggle_sending_button(list_view) {
	if (!has_common(frappe.user_roles, ["Administrator", "System Manager"]))
		return;
	const sending_disabled = cint(frappe.sys_defaults.suspend_email_queue);
	const label = sending_disabled ? __("Resume Sending") : __("Suspend Sending");
	list_view.page.add_inner_button(
		label,
		async () => {
			await frappe.xcall(
				"frappe.email.doctype.email_queue.email_queue.toggle_sending",
				// enable if disabled
				{enable: sending_disabled}
			);
			// set new value for suspend_email_queue in sys_defaults
			frappe.sys_defaults.suspend_email_queue = sending_disabled ? 0 : 1;
			// clear the button and show one with the opposite label
			list_view.page.remove_inner_button(label);
			show_toggle_sending_button(list_view);
		}
	);
}

View file

@ -146,6 +146,7 @@ frappe.ui.form.on('Newsletter', {
if (stats.sent + stats.error >= frm.doc.total_recipients || (!stats.total && !stats.emails_queued)) {
frm.sending_status && clearInterval(frm.sending_status);
frm.sending_status = null;
return;
}
}

View file

@ -93,7 +93,7 @@ class TestNotification(unittest.TestCase):
def test_condition(self):
"""Check notification is triggered based on a condition."""
event = frappe.new_doc("Event")
event.subject = ("test",)
event.subject = "test"
event.event_type = "Private"
event.starts_on = "2014-06-06 12:00:00"
event.insert()
@ -146,7 +146,7 @@ class TestNotification(unittest.TestCase):
def test_value_changed(self):
event = frappe.new_doc("Event")
event.subject = ("test",)
event.subject = "test"
event.event_type = "Private"
event.starts_on = "2014-06-06 12:00:00"
event.insert()
@ -195,7 +195,7 @@ class TestNotification(unittest.TestCase):
frappe.db.commit()
event = frappe.new_doc("Event")
event.subject = ("test-2",)
event.subject = "test-2"
event.event_type = "Private"
event.starts_on = "2014-06-06 12:00:00"
event.insert()
@ -209,9 +209,8 @@ class TestNotification(unittest.TestCase):
event.delete()
def test_date_changed(self):
event = frappe.new_doc("Event")
event.subject = ("test",)
event.subject = "test"
event.event_type = "Private"
event.starts_on = "2014-01-01 12:00:00"
event.insert()

View file

@ -267,6 +267,7 @@ class EMail:
validate_email_address(strip(self.sender), True)
self.reply_to = validate_email_address(strip(self.reply_to) or self.sender, True)
self.set_header("X-Original-From", self.sender)
self.replace_sender()
self.replace_sender_name()
@ -279,16 +280,14 @@ class EMail:
def replace_sender(self):
if cint(self.email_account.always_use_account_email_id_as_sender):
self.set_header("X-Original-From", self.sender)
sender_name, sender_email = parse_addr(self.sender)
sender_name, _ = parse_addr(self.sender)
self.sender = email.utils.formataddr(
(str(Header(sender_name or self.email_account.name, "utf-8")), self.email_account.email_id)
)
def replace_sender_name(self):
if cint(self.email_account.always_use_account_name_as_sender_name):
self.set_header("X-Original-From", self.sender)
sender_name, sender_email = parse_addr(self.sender)
_, sender_email = parse_addr(self.sender)
self.sender = email.utils.formataddr(
(str(Header(self.email_account.name, "utf-8")), sender_email)
)

168
frappe/email/oauth.py Normal file
View file

@ -0,0 +1,168 @@
import base64
from imaplib import IMAP4
from poplib import POP3
from smtplib import SMTP
from urllib.parse import quote
import frappe
from frappe.integrations.google_oauth import GoogleOAuth
from frappe.utils.password import encrypt
class OAuthenticationError(Exception):
	"""Raised when OAuth authorization for an email account is missing or fails."""
class Oauth:
	"""Authenticate an IMAP4/POP3/SMTP connection using OAuth (SASL XOAUTH2).

	Wraps an already-constructed connection object and performs the
	XOAUTH2 authentication exchange for the given email account's
	credentials, refreshing the access token once on failure.
	Only the GMail service is implemented (see ``_validate``).
	"""

	def __init__(
		self,
		conn: IMAP4 | POP3 | SMTP,
		email_account: str,
		email: str,
		access_token: str,
		refresh_token: str,
		service: str,
		mechanism: str = "XOAUTH2",
	) -> None:
		# email_account: name of the Email Account doc (used for logging and
		# persisting refreshed tokens); email: the login address sent in the
		# SASL auth string.
		self.email_account = email_account
		self.email = email
		self.service = service
		self._mechanism = mechanism
		self._conn = conn
		self._access_token = access_token
		self._refresh_token = refresh_token
		self._validate()

	def _validate(self) -> None:
		# Only GMail has an OAuth implementation; anything else is a
		# programming error rather than a user-facing one.
		if self.service != "GMail":
			raise NotImplementedError(
				f"Service {self.service} currently doesn't have oauth implementation."
			)
		# Without a refresh token the account was never authorized.
		if not self._refresh_token:
			frappe.throw(
				frappe._("Please Authorize OAuth."),
				OAuthenticationError,
				frappe._("OAuth Error"),
			)

	@property
	def _auth_string(self) -> str:
		# SASL XOAUTH2 initial client response: "user=<email>\1auth=Bearer <token>\1\1".
		return f"user={self.email}\1auth=Bearer {self._access_token}\1\1"

	def connect(self, _retry: int = 0) -> None:
		"""Connection method with retry on exception for Oauth.

		Attempts the protocol-appropriate auth exchange; on any failure it
		refreshes the access token once and retries, otherwise logs and
		re-raises for the caller's existing exception handling.
		"""
		try:
			if isinstance(self._conn, POP3):
				res = self._connect_pop()
				if not res.startswith(b"+OK"):
					# NOTE(review): bare `raise` with no active exception raises
					# RuntimeError, which the except below catches — confirm intended.
					raise
			elif isinstance(self._conn, IMAP4):
				self._connect_imap()
			else:
				# SMTP
				self._connect_smtp()
		except Exception as e:
			# maybe the access token expired - refreshing
			access_token = self._refresh_access_token()
			if not access_token or _retry > 0:
				frappe.log_error(
					"OAuth Error - Authentication Failed", str(e), "Email Account", self.email_account
				)
				# raising a bare exception here as we have a lot of exception handling present
				# where the connect method is called from - hence just logging and raising.
				raise
			self._access_token = access_token
			self.connect(_retry + 1)

	def _connect_pop(self) -> bytes:
		# poplib doesn't have AUTH command implementation, so issue the raw
		# command via the private _shortcmd helper with a base64 auth string.
		res = self._conn._shortcmd(
			"AUTH {} {}".format(
				self._mechanism, base64.b64encode(bytes(self._auth_string, "utf-8")).decode("utf-8")
			)
		)
		return res

	def _connect_imap(self) -> None:
		# imaplib handles base64 encoding itself; supply the raw auth string.
		self._conn.authenticate(self._mechanism, lambda x: self._auth_string)

	def _connect_smtp(self) -> None:
		# initial_response_ok=False forces the two-step AUTH exchange.
		self._conn.auth(self._mechanism, lambda x: self._auth_string, initial_response_ok=False)

	def _refresh_access_token(self) -> str:
		"""Refreshes access token via calling `refresh_access_token` method of oauth service object."""
		service_obj = self._get_service_object()
		access_token = service_obj.refresh_access_token(self._refresh_token).get("access_token")
		if access_token:
			# set the new access token in db; update_modified=False keeps the
			# doc's `modified` timestamp untouched for a background refresh.
			frappe.db.set_value(
				"Email Account",
				self.email_account,
				"access_token",
				encrypt(access_token),
				update_modified=False,
			)
		return access_token

	def _get_service_object(self):
		"""Get Oauth service object for the configured service (GMail only)."""
		return {
			"GMail": GoogleOAuth("mail", validate=False),
		}[self.service]
@frappe.whitelist(methods=["POST"])
def oauth_access(email_account: str, service: str):
	"""Default endpoint/caller for all email OAuth services.

	Returns the authorization URL the client should redirect to.
	"""
	if not service:
		frappe.throw(frappe._("No Service is selected. Please select one and try again!"))
	if service != "GMail":
		raise NotImplementedError(f"Service {service} currently doesn't have oauth implementation.")
	return authorize_google_access(email_account, "Email Account")
def authorize_google_access(email_account, doctype: str = "Email Account", code: str = None):
	"""Facilitates google oauth for email.

	This is invoked 2 times - first time when user clicks `Authorize API Access`
	for getting the authorization url, and second time for setting the refresh
	and access token in db when google redirects back with the oauth code.
	"""
	oauth_obj = GoogleOAuth("mail")
	if not code:
		# First leg: return the consent-screen URL; after authorization Google
		# redirects back to the Email Account form with a success query param.
		return oauth_obj.get_authentication_url(
			{
				"redirect": f"/app/Form/{quote(doctype)}/{quote(email_account)}",
				"success_query_param": "successful_authorization=1",
				"email_account": email_account,
			},
		)
	# Second leg: exchange the code for tokens and persist them encrypted;
	# update_modified=False avoids touching the doc's `modified` timestamp.
	res = oauth_obj.authorize(code)
	frappe.db.set_value(
		doctype,
		email_account,
		{
			"refresh_token": encrypt(res.get("refresh_token")),
			"access_token": encrypt(res.get("access_token")),
		},
		update_modified=False,
	)

View file

@ -148,7 +148,7 @@ def flush(from_test=False):
msgprint(_("Emails are muted"))
from_test = True
if cint(frappe.defaults.get_defaults().get("hold_queue")) == 1:
if cint(frappe.db.get_default("suspend_email_queue")) == 1:
return
for row in get_queue():

View file

@ -18,6 +18,7 @@ from email_reply_parser import EmailReplyParser
import frappe
from frappe import _, safe_decode, safe_encode
from frappe.core.doctype.file import MaxFileSizeReachedError, get_random_filename
from frappe.email.oauth import Oauth
from frappe.utils import (
add_days,
cint,
@ -98,7 +99,20 @@ class EmailServer:
self.imap = Timed_IMAP4(
self.settings.host, self.settings.incoming_port, timeout=frappe.conf.get("pop_timeout")
)
self.imap.login(self.settings.username, self.settings.password)
if self.settings.use_oauth:
Oauth(
self.imap,
self.settings.email_account,
self.settings.username,
self.settings.access_token,
self.settings.refresh_token,
self.settings.service,
).connect()
else:
self.imap.login(self.settings.username, self.settings.password)
# connection established!
return True
@ -119,8 +133,19 @@ class EmailServer:
self.settings.host, self.settings.incoming_port, timeout=frappe.conf.get("pop_timeout")
)
self.pop.user(self.settings.username)
self.pop.pass_(self.settings.password)
if self.settings.use_oauth:
Oauth(
self.pop,
self.settings.email_account,
self.settings.username,
self.settings.access_token,
self.settings.refresh_token,
self.settings.service,
).connect()
else:
self.pop.user(self.settings.username)
self.pop.pass_(self.settings.password)
# connection established!
return True

View file

@ -5,6 +5,7 @@ import smtplib
import frappe
from frappe import _
from frappe.email.oauth import Oauth
from frappe.utils import cint, cstr
@ -43,13 +44,31 @@ def send(email, append_to=None, retry=1):
class SMTPServer:
def __init__(self, server, login=None, password=None, port=None, use_tls=None, use_ssl=None):
def __init__(
self,
server,
login=None,
email_account=None,
password=None,
port=None,
use_tls=None,
use_ssl=None,
use_oauth=0,
refresh_token=None,
access_token=None,
service=None,
):
self.login = login
self.email_account = email_account
self.password = password
self._server = server
self._port = port
self.use_tls = use_tls
self.use_ssl = use_ssl
self.use_oauth = use_oauth
self.refresh_token = refresh_token
self.access_token = access_token
self.service = service
self._session = None
if not self.server:
@ -91,7 +110,13 @@ class SMTPServer:
)
self.secure_session(_session)
if self.login and self.password:
if self.use_oauth:
Oauth(
_session, self.email_account, self.login, self.access_token, self.refresh_token, self.service
).connect()
elif self.password:
res = _session.login(str(self.login or ""), str(self.password or ""))
# check if logged correctly
@ -122,7 +147,7 @@ class SMTPServer:
@classmethod
def throw_invalid_credentials_exception(cls):
frappe.throw(
_("Incorrect email or password. Please check your login credentials."),
_("Please check your email login credentials."),
title=_("Invalid Credentials"),
exc=InvalidEmailCredentials,
)

View file

@ -1,5 +1,6 @@
# Copyright (c) 2019, Frappe Technologies Pvt. Ltd. and contributors
# License: MIT. See LICENSE
import imaplib
import poplib

View file

@ -1,8 +1,6 @@
# Copyright (c) 2020, Frappe Technologies and contributors
# License: MIT. See LICENSE
from pymysql import InternalError
import frappe
@ -66,7 +64,7 @@ def return_location(doctype, filters_sql):
coords = frappe.db.sql(
f"""SELECT name, location FROM `tab{doctype}` WHERE {filters_sql}""", as_dict=True
)
except InternalError:
except frappe.db.InternalError:
frappe.msgprint(frappe._("This Doctype does not contain location fields"), raise_exception=True)
return
else:
@ -82,7 +80,7 @@ def return_coordinates(doctype, filters_sql):
f"""SELECT name, latitude, longitude FROM `tab{doctype}` WHERE {filters_sql}""",
as_dict=True,
)
except InternalError:
except frappe.db.InternalError:
frappe.msgprint(
frappe._("This Doctype does not contain latitude and longitude fields"), raise_exception=True
)

View file

@ -180,12 +180,10 @@ doc_events = {
"on_update": "frappe.integrations.doctype.google_contacts.google_contacts.update_contacts_to_google_contacts",
},
"DocType": {
"after_insert": "frappe.cache_manager.build_domain_restriced_doctype_cache",
"after_save": "frappe.cache_manager.build_domain_restriced_doctype_cache",
"on_update": "frappe.cache_manager.build_domain_restriced_doctype_cache",
},
"Page": {
"after_insert": "frappe.cache_manager.build_domain_restriced_page_cache",
"after_save": "frappe.cache_manager.build_domain_restriced_page_cache",
"on_update": "frappe.cache_manager.build_domain_restriced_page_cache",
},
}
@ -242,7 +240,6 @@ scheduler_events = {
"weekly_long": [
"frappe.integrations.doctype.dropbox_settings.dropbox_settings.take_backups_weekly",
"frappe.integrations.doctype.s3_backup_settings.s3_backup_settings.take_backups_weekly",
"frappe.desk.doctype.route_history.route_history.flush_old_route_records",
"frappe.desk.form.document_follow.send_weekly_updates",
"frappe.social.doctype.energy_point_log.energy_point_log.send_weekly_summary",
"frappe.integrations.doctype.google_drive.google_drive.weekly_backup",

View file

@ -1,4 +1,4 @@
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and Contributors
# License: MIT. See LICENSE
import json
@ -10,7 +10,20 @@ import click
import frappe
from frappe.defaults import _clear_cache
from frappe.utils import is_git_url
from frappe.utils import cint, is_git_url
def _is_scheduler_enabled() -> bool:
enable_scheduler = False
try:
frappe.connect()
enable_scheduler = cint(frappe.db.get_single_value("System Settings", "enable_scheduler"))
except Exception:
pass
finally:
frappe.db.close()
return bool(enable_scheduler)
def _new_site(
@ -29,11 +42,9 @@ def _new_site(
db_type=None,
db_host=None,
db_port=None,
new_site=False,
):
"""Install a new Frappe site"""
from frappe.commands.scheduler import _is_scheduler_enabled
from frappe.utils import get_site_path, scheduler, touch_file
if not force and os.path.exists(site):

View file

@ -62,21 +62,21 @@ def take_backups_weekly():
def take_backups_if(freq):
if frappe.db.get_value("Dropbox Settings", None, "backup_frequency") == freq:
if frappe.db.get_single_value("Dropbox Settings", "backup_frequency") == freq:
take_backup_to_dropbox()
def take_backup_to_dropbox(retry_count=0, upload_db_backup=True):
did_not_upload, error_log = [], []
try:
if cint(frappe.db.get_value("Dropbox Settings", None, "enabled")):
if cint(frappe.db.get_single_value("Dropbox Settings", "enabled")):
validate_file_size()
did_not_upload, error_log = backup_to_dropbox(upload_db_backup)
if did_not_upload:
raise Exception
if cint(frappe.db.get_value("Dropbox Settings", None, "send_email_for_successful_backup")):
if cint(frappe.db.get_single_value("Dropbox Settings", "send_email_for_successful_backup")):
send_email(True, "Dropbox", "Dropbox Settings", "send_notifications_to")
except JobTimeoutException:
if retry_count < 2:

View file

@ -13,7 +13,7 @@ from googleapiclient.errors import HttpError
import frappe
from frappe import _
from frappe.integrations.doctype.google_settings.google_settings import get_auth_url
from frappe.integrations.google_oauth import GoogleOAuth
from frappe.model.document import Document
from frappe.utils import (
add_days,
@ -90,7 +90,7 @@ class GoogleCalendar(Document):
}
try:
r = requests.post(get_auth_url(), data=data).json()
r = requests.post(GoogleOAuth.OAUTH_URL, data=data).json()
except requests.exceptions.HTTPError:
button_label = frappe.bold(_("Allow Google Calendar Access"))
frappe.throw(
@ -130,7 +130,7 @@ def authorize_access(g_calendar, reauthorize=None):
"redirect_uri": redirect_uri,
"grant_type": "authorization_code",
}
r = requests.post(get_auth_url(), data=data).json()
r = requests.post(GoogleOAuth.OAUTH_URL, data=data).json()
if "refresh_token" in r:
frappe.db.set_value(
@ -191,7 +191,7 @@ def get_google_calendar_object(g_calendar):
credentials_dict = {
"token": account.get_access_token(),
"refresh_token": account.get_password(fieldname="refresh_token", raise_exception=False),
"token_uri": get_auth_url(),
"token_uri": GoogleOAuth.OAUTH_URL,
"client_id": google_settings.client_id,
"client_secret": google_settings.get_password(fieldname="client_secret", raise_exception=False),
"scopes": "https://www.googleapis.com/auth/calendar/v3",

View file

@ -37,16 +37,11 @@ frappe.ui.form.on('Google Contacts', {
}
},
authorize_google_contacts_access: function(frm) {
let reauthorize = 0;
if(frm.doc.authorization_code) {
reauthorize = 1;
}
frappe.call({
method: "frappe.integrations.doctype.google_contacts.google_contacts.authorize_access",
args: {
"g_contact": frm.doc.name,
"reauthorize": reauthorize
"reauthorize": frm.doc.authorization_code ? 1 : 0
},
callback: function(r) {
if(!r.exc) {

View file

@ -2,18 +2,14 @@
# License: MIT. See LICENSE
import google.oauth2.credentials
import requests
from googleapiclient.discovery import build
from urllib.parse import quote
from googleapiclient.errors import HttpError
import frappe
from frappe import _
from frappe.integrations.doctype.google_settings.google_settings import get_auth_url
from frappe.integrations.google_oauth import GoogleOAuth
from frappe.model.document import Document
from frappe.utils import get_request_site_address
SCOPES = "https://www.googleapis.com/auth/contacts"
class GoogleContacts(Document):
@ -22,120 +18,56 @@ class GoogleContacts(Document):
frappe.throw(_("Enable Google API in Google Settings."))
def get_access_token(self):
google_settings = frappe.get_doc("Google Settings")
if not google_settings.enable:
frappe.throw(_("Google Contacts Integration is disabled."))
if not self.refresh_token:
button_label = frappe.bold(_("Allow Google Contacts Access"))
raise frappe.ValidationError(_("Click on {0} to generate Refresh Token.").format(button_label))
data = {
"client_id": google_settings.client_id,
"client_secret": google_settings.get_password(fieldname="client_secret", raise_exception=False),
"refresh_token": self.get_password(fieldname="refresh_token", raise_exception=False),
"grant_type": "refresh_token",
"scope": SCOPES,
}
try:
r = requests.post(get_auth_url(), data=data).json()
except requests.exceptions.HTTPError:
button_label = frappe.bold(_("Allow Google Contacts Access"))
frappe.throw(
_(
"Something went wrong during the token generation. Click on {0} to generate a new one."
).format(button_label)
)
oauth_obj = GoogleOAuth("contacts")
r = oauth_obj.refresh_access_token(
self.get_password(fieldname="refresh_token", raise_exception=False)
)
return r.get("access_token")
@frappe.whitelist()
def authorize_access(g_contact, reauthorize=None):
@frappe.whitelist(methods=["POST"])
def authorize_access(g_contact, reauthorize=False, code=None):
"""
If no Authorization code get it from Google and then request for Refresh Token.
Google Contact Name is set to flags to set_value after Authorization Code is obtained.
"""
google_settings = frappe.get_doc("Google Settings")
google_contact = frappe.get_doc("Google Contacts", g_contact)
redirect_uri = (
get_request_site_address(True)
+ "?cmd=frappe.integrations.doctype.google_contacts.google_contacts.google_callback"
oauth_code = (
frappe.db.get_value("Google Contacts", g_contact, "authorization_code") if not code else code
)
oauth_obj = GoogleOAuth("contacts")
if not google_contact.authorization_code or reauthorize:
frappe.cache().hset("google_contacts", "google_contact", google_contact.name)
return get_authentication_url(client_id=google_settings.client_id, redirect_uri=redirect_uri)
else:
try:
data = {
"code": google_contact.authorization_code,
"client_id": google_settings.client_id,
"client_secret": google_settings.get_password(
fieldname="client_secret", raise_exception=False
),
"redirect_uri": redirect_uri,
"grant_type": "authorization_code",
}
r = requests.post(get_auth_url(), data=data).json()
if "refresh_token" in r:
frappe.db.set_value(
"Google Contacts", google_contact.name, "refresh_token", r.get("refresh_token")
)
frappe.db.commit()
frappe.local.response["type"] = "redirect"
frappe.local.response["location"] = f"/app/Form/Google%20Contacts/{google_contact.name}"
frappe.msgprint(_("Google Contacts has been configured."))
except Exception as e:
frappe.throw(e)
def get_authentication_url(client_id=None, redirect_uri=None):
return {
"url": "https://accounts.google.com/o/oauth2/v2/auth?access_type=offline&response_type=code&prompt=consent&client_id={}&include_granted_scopes=true&scope={}&redirect_uri={}".format(
client_id, SCOPES, redirect_uri
if not oauth_code or reauthorize:
return oauth_obj.get_authentication_url(
{
"g_contact": g_contact,
"redirect": f"/app/Form/{quote('Google Contacts')}/{quote(g_contact)}",
},
)
}
@frappe.whitelist()
def google_callback(code=None):
"""
Authorization code is sent to callback as per the API configuration
"""
google_contact = frappe.cache().hget("google_contacts", "google_contact")
frappe.db.set_value("Google Contacts", google_contact, "authorization_code", code)
frappe.db.commit()
authorize_access(google_contact)
r = oauth_obj.authorize(oauth_code)
frappe.db.set_value(
"Google Contacts",
g_contact,
{"authorization_code": oauth_code, "refresh_token": r.get("refresh_token")},
)
def get_google_contacts_object(g_contact):
"""
Returns an object of Google Calendar along with Google Calendar doc.
"""
google_settings = frappe.get_doc("Google Settings")
account = frappe.get_doc("Google Contacts", g_contact)
oauth_obj = GoogleOAuth("contacts")
credentials_dict = {
"token": account.get_access_token(),
"refresh_token": account.get_password(fieldname="refresh_token", raise_exception=False),
"token_uri": get_auth_url(),
"client_id": google_settings.client_id,
"client_secret": google_settings.get_password(fieldname="client_secret", raise_exception=False),
"scopes": "https://www.googleapis.com/auth/contacts",
}
credentials = google.oauth2.credentials.Credentials(**credentials_dict)
google_contacts = build(
serviceName="people", version="v1", credentials=credentials, static_discovery=False
google_contacts = oauth_obj.get_google_service_object(
account.get_access_token(),
account.get_password(fieldname="indexing_refresh_token", raise_exception=False),
)
return google_contacts, account

View file

@ -41,15 +41,10 @@ frappe.ui.form.on('Google Drive', {
}
},
authorize_google_drive_access: function(frm) {
let reauthorize = 0;
if (frm.doc.authorization_code) {
reauthorize = 1;
}
frappe.call({
method: "frappe.integrations.doctype.google_drive.google_drive.authorize_access",
args: {
"reauthorize": reauthorize
"reauthorize": frm.doc.authorization_code ? 1 : 0
},
callback: function(r) {
if (!r.exc) {

View file

@ -4,27 +4,22 @@
import os
from urllib.parse import quote
import google.oauth2.credentials
import requests
from apiclient.http import MediaFileUpload
from googleapiclient.discovery import build
from googleapiclient.errors import HttpError
import frappe
from frappe import _
from frappe.integrations.doctype.google_settings.google_settings import get_auth_url
from frappe.integrations.google_oauth import GoogleOAuth
from frappe.integrations.offsite_backup_utils import (
get_latest_backup_file,
send_email,
validate_file_size,
)
from frappe.model.document import Document
from frappe.utils import get_backups_path, get_bench_path, get_request_site_address
from frappe.utils import get_backups_path, get_bench_path
from frappe.utils.background_jobs import enqueue
from frappe.utils.backups import new_backup
SCOPES = "https://www.googleapis.com/auth/drive"
class GoogleDrive(Document):
def validate(self):
@ -33,118 +28,57 @@ class GoogleDrive(Document):
self.backup_folder_id = ""
def get_access_token(self):
google_settings = frappe.get_doc("Google Settings")
if not google_settings.enable:
frappe.throw(_("Google Integration is disabled."))
if not self.refresh_token:
button_label = frappe.bold(_("Allow Google Drive Access"))
raise frappe.ValidationError(_("Click on {0} to generate Refresh Token.").format(button_label))
data = {
"client_id": google_settings.client_id,
"client_secret": google_settings.get_password(fieldname="client_secret", raise_exception=False),
"refresh_token": self.get_password(fieldname="refresh_token", raise_exception=False),
"grant_type": "refresh_token",
"scope": SCOPES,
}
try:
r = requests.post(get_auth_url(), data=data).json()
except requests.exceptions.HTTPError:
button_label = frappe.bold(_("Allow Google Drive Access"))
frappe.throw(
_(
"Something went wrong during the token generation. Click on {0} to generate a new one."
).format(button_label)
)
oauth_obj = GoogleOAuth("drive")
r = oauth_obj.refresh_access_token(
self.get_password(fieldname="refresh_token", raise_exception=False)
)
return r.get("access_token")
@frappe.whitelist()
def authorize_access(reauthorize=None):
@frappe.whitelist(methods=["POST"])
def authorize_access(reauthorize=False, code=None):
"""
If no Authorization code get it from Google and then request for Refresh Token.
Google Contact Name is set to flags to set_value after Authorization Code is obtained.
"""
google_settings = frappe.get_doc("Google Settings")
google_drive = frappe.get_doc("Google Drive")
redirect_uri = (
get_request_site_address(True)
+ "?cmd=frappe.integrations.doctype.google_drive.google_drive.google_callback"
oauth_code = (
frappe.db.get_single_value("Google Drive", "authorization_code") if not code else code
)
oauth_obj = GoogleOAuth("drive")
if not google_drive.authorization_code or reauthorize:
if not oauth_code or reauthorize:
if reauthorize:
frappe.db.set_value("Google Drive", None, "backup_folder_id", "")
return get_authentication_url(client_id=google_settings.client_id, redirect_uri=redirect_uri)
else:
try:
data = {
"code": google_drive.authorization_code,
"client_id": google_settings.client_id,
"client_secret": google_settings.get_password(
fieldname="client_secret", raise_exception=False
),
"redirect_uri": redirect_uri,
"grant_type": "authorization_code",
}
r = requests.post(get_auth_url(), data=data).json()
if "refresh_token" in r:
frappe.db.set_value("Google Drive", google_drive.name, "refresh_token", r.get("refresh_token"))
frappe.db.commit()
frappe.local.response["type"] = "redirect"
frappe.local.response["location"] = "/app/Form/{}".format(quote("Google Drive"))
frappe.msgprint(_("Google Drive has been configured."))
except Exception as e:
frappe.throw(e)
def get_authentication_url(client_id, redirect_uri):
return {
"url": "https://accounts.google.com/o/oauth2/v2/auth?access_type=offline&response_type=code&prompt=consent&client_id={}&include_granted_scopes=true&scope={}&redirect_uri={}".format(
client_id, SCOPES, redirect_uri
return oauth_obj.get_authentication_url(
{
"redirect": f"/app/Form/{quote('Google Drive')}",
},
)
}
@frappe.whitelist()
def google_callback(code=None):
"""
Authorization code is sent to callback as per the API configuration
"""
frappe.db.set_value("Google Drive", None, "authorization_code", code)
frappe.db.commit()
authorize_access()
r = oauth_obj.authorize(oauth_code)
frappe.db.set_value(
"Google Drive",
"Google Drive",
{"authorization_code": oauth_code, "refresh_token": r.get("refresh_token")},
)
def get_google_drive_object():
"""
Returns an object of Google Drive.
"""
google_settings = frappe.get_doc("Google Settings")
account = frappe.get_doc("Google Drive")
oauth_obj = GoogleOAuth("drive")
credentials_dict = {
"token": account.get_access_token(),
"refresh_token": account.get_password(fieldname="refresh_token", raise_exception=False),
"token_uri": get_auth_url(),
"client_id": google_settings.client_id,
"client_secret": google_settings.get_password(fieldname="client_secret", raise_exception=False),
"scopes": "https://www.googleapis.com/auth/drive/v3",
}
credentials = google.oauth2.credentials.Credentials(**credentials_dict)
google_drive = build(
serviceName="drive", version="v3", credentials=credentials, static_discovery=False
google_drive = oauth_obj.get_google_service_object(
account.get_access_token(),
account.get_password(fieldname="indexing_refresh_token", raise_exception=False),
)
return google_drive, account

View file

@ -9,10 +9,6 @@ class GoogleSettings(Document):
pass
def get_auth_url():
return "https://www.googleapis.com/oauth2/v4/token"
@frappe.whitelist()
def get_file_picker_settings():
"""Return all the data FileUploader needs to start the Google Drive Picker."""

View file

@ -1,4 +1,5 @@
{
"actions": [],
"creation": "2019-05-29 01:24:29.585060",
"doctype": "DocType",
"editable_grid": 1,
@ -19,13 +20,14 @@
"fieldname": "erpnext_role",
"fieldtype": "Link",
"in_list_view": 1,
"label": "ERPNext Role",
"label": "User Role",
"options": "Role",
"reqd": 1
}
],
"istable": 1,
"modified": "2019-07-15 06:46:38.050408",
"links": [],
"modified": "2022-07-07 16:28:44.828514",
"modified_by": "Administrator",
"module": "Integrations",
"name": "LDAP Group Mapping",
@ -34,5 +36,6 @@
"quick_entry": 1,
"sort_field": "modified",
"sort_order": "ASC",
"states": [],
"track_changes": 1
}

View file

@ -42,7 +42,10 @@
"column_break_33",
"ldap_group_member_attribute",
"ldap_group_mappings_section",
"default_user_type",
"column_break_38",
"default_role",
"section_break_40",
"ldap_groups",
"ldap_group_field"
],
@ -79,9 +82,11 @@
"reqd": 1
},
{
"depends_on": "eval: doc.default_user_type == \"System User\"",
"fieldname": "default_role",
"fieldtype": "Link",
"label": "Default Role on Creation",
"label": "Default User Role",
"mandatory_depends_on": "eval: doc.default_user_type == \"System User\"",
"options": "Role",
"reqd": 1
},
@ -249,10 +254,10 @@
"label": "Group Object Class"
},
{
"description": "string value, i.e. {0} or uid={0},ou=users,dc=example,dc=com",
"fieldname": "ldap_custom_group_search",
"fieldtype": "Data",
"label": "Custom Group Search"
"description": "string value, i.e. {0} or uid={0},ou=users,dc=example,dc=com",
"fieldname": "ldap_custom_group_search",
"fieldtype": "Data",
"label": "Custom Group Search"
},
{
"description": "Requires any valid fdn path. i.e. ou=users,dc=example,dc=com",
@ -268,12 +273,28 @@
"fieldtype": "Data",
"label": "LDAP search path for Groups",
"reqd": 1
},
{
"fieldname": "default_user_type",
"fieldtype": "Link",
"label": "Default User Type",
"options": "User Type",
"reqd": 1
},
{
"fieldname": "column_break_38",
"fieldtype": "Column Break"
},
{
"fieldname": "section_break_40",
"fieldtype": "Section Break",
"hide_border": 1
}
],
"in_create": 1,
"issingle": 1,
"links": [],
"modified": "2021-07-27 11:51:43.328271",
"modified": "2022-07-07 16:51:46.230793",
"modified_by": "Administrator",
"module": "Integrations",
"name": "LDAP Settings",
@ -294,5 +315,6 @@
"read_only": 1,
"sort_field": "modified",
"sort_order": "DESC",
"states": [],
"track_changes": 1
}

View file

@ -1,19 +1,37 @@
# Copyright (c) 2015, Frappe Technologies and contributors
# Copyright (c) 2022, Frappe Technologies and contributors
# License: MIT. See LICENSE
import ssl
from typing import TYPE_CHECKING
import ldap3
from ldap3 import AUTO_BIND_TLS_BEFORE_BIND, HASHED_SALTED_SHA, MODIFY_REPLACE
from ldap3.abstract.entry import Entry
from ldap3.core.exceptions import (
LDAPAttributeError,
LDAPInvalidCredentialsResult,
LDAPInvalidFilterError,
LDAPNoSuchObjectResult,
)
from ldap3.utils.hashed import hashed
import frappe
from frappe import _, safe_encode
from frappe.model.document import Document
from frappe.twofactor import authenticate_for_2factor, confirm_otp_token, should_run_2fa
if TYPE_CHECKING:
from frappe.core.doctype.user.user import User
class LDAPSettings(Document):
def validate(self):
self.default_user_type = self.default_user_type or "System User"
if not self.enabled:
return
if not self.flags.ignore_mandatory:
if (
self.ldap_search_string.count("(") == self.ldap_search_string.count(")")
and self.ldap_search_string.startswith("(")
@ -28,8 +46,6 @@ class LDAPSettings(Document):
try:
if conn.result["type"] == "bindResponse" and self.base_dn:
import ldap3
conn.search(
search_base=self.ldap_search_path_user,
search_filter="(objectClass=*)",
@ -40,13 +56,13 @@ class LDAPSettings(Document):
search_base=self.ldap_search_path_group, search_filter="(objectClass=*)", attributes=["cn"]
)
except ldap3.core.exceptions.LDAPAttributeError as ex:
except LDAPAttributeError as ex:
frappe.throw(
_("LDAP settings incorrect. validation response was: {0}").format(ex),
title=_("Misconfigured"),
)
except ldap3.core.exceptions.LDAPNoSuchObjectResult:
except LDAPNoSuchObjectResult:
frappe.throw(
_("Ensure the user and group search paths are correct."), title=_("Misconfigured")
)
@ -75,12 +91,8 @@ class LDAPSettings(Document):
)
)
def connect_to_ldap(self, base_dn, password, read_only=True):
def connect_to_ldap(self, base_dn, password, read_only=True) -> ldap3.Connection:
try:
import ssl
import ldap3
if self.require_trusted_certificate == "Yes":
tls_configuration = ldap3.Tls(validate=ssl.CERT_REQUIRED, version=ssl.PROTOCOL_TLS_CLIENT)
else:
@ -94,9 +106,9 @@ class LDAPSettings(Document):
tls_configuration.ca_certs_file = self.local_ca_certs_file
server = ldap3.Server(host=self.ldap_server_url, tls=tls_configuration)
bind_type = ldap3.AUTO_BIND_TLS_BEFORE_BIND if self.ssl_tls_mode == "StartTLS" else True
bind_type = AUTO_BIND_TLS_BEFORE_BIND if self.ssl_tls_mode == "StartTLS" else True
conn = ldap3.Connection(
return ldap3.Connection(
server=server,
user=base_dn,
password=password,
@ -105,18 +117,16 @@ class LDAPSettings(Document):
raise_exceptions=True,
)
return conn
except ImportError:
msg = _("Please Install the ldap3 library via pip to use ldap functionality.")
frappe.throw(msg, title=_("LDAP Not Installed"))
except ldap3.core.exceptions.LDAPInvalidCredentialsResult:
except LDAPInvalidCredentialsResult:
frappe.throw(_("Invalid username or password"))
except Exception as ex:
frappe.throw(_(str(ex)))
@staticmethod
def get_ldap_client_settings():
def get_ldap_client_settings() -> dict:
# return the settings to be used on the client side.
result = {"enabled": False}
ldap = frappe.get_cached_doc("LDAP Settings")
@ -126,21 +136,19 @@ class LDAPSettings(Document):
return result
@classmethod
def update_user_fields(cls, user, user_data):
def update_user_fields(cls, user: "User", user_data: dict):
updatable_data = {key: value for key, value in user_data.items() if key != "email"}
for key, value in updatable_data.items():
setattr(user, key, value)
user.save(ignore_permissions=True)
def sync_roles(self, user, additional_groups=None):
def sync_roles(self, user: "User", additional_groups: list = None):
current_roles = {d.role for d in user.get("roles")}
needed_roles = set()
needed_roles.add(self.default_role)
if self.default_user_type == "System User":
needed_roles = {self.default_role}
else:
needed_roles = set()
lower_groups = [g.lower() for g in additional_groups or []]
all_mapped_roles = {r.erpnext_role for r in self.ldap_groups}
@ -157,28 +165,31 @@ class LDAPSettings(Document):
user.remove_roles(*roles_to_remove)
def create_or_update_user(self, user_data, groups=None):
user = None
def create_or_update_user(self, user_data: dict, groups: list = None):
user: "User" = None
role: str = None
if frappe.db.exists("User", user_data["email"]):
user = frappe.get_doc("User", user_data["email"])
LDAPSettings.update_user_fields(user=user, user_data=user_data)
else:
doc = user_data
doc.update(
{
"doctype": "User",
"send_welcome_email": 0,
"language": "",
"user_type": "System User",
# "roles": [{
# "role": self.default_role
# }]
}
)
doc = user_data | {
"doctype": "User",
"send_welcome_email": 0,
"language": "",
"user_type": self.default_user_type,
}
user = frappe.get_doc(doc)
user.insert(ignore_permissions=True)
# always add default role.
user.add_roles(self.default_role)
if self.default_user_type == "System User":
role = self.default_role
else:
role = frappe.db.get_value("User Type", user.user_type, "role")
if role:
user.add_roles(role)
self.sync_roles(user, groups)
return user
@ -203,38 +214,28 @@ class LDAPSettings(Document):
return ldap_attributes
def fetch_ldap_groups(self, user, conn):
import ldap3
def fetch_ldap_groups(self, user: Entry, conn: ldap3.Connection) -> list:
if not isinstance(user, Entry):
raise TypeError("Invalid type, attribute 'user' must be of type 'ldap3.abstract.entry.Entry'")
if type(user) is not ldap3.abstract.entry.Entry:
raise TypeError(
"Invalid type, attribute {} must be of type '{}'".format("user", "ldap3.abstract.entry.Entry")
)
if type(conn) is not ldap3.core.connection.Connection:
raise TypeError(
"Invalid type, attribute {} must be of type '{}'".format("conn", "ldap3.Connection")
)
if not isinstance(conn, ldap3.Connection):
raise TypeError("Invalid type, attribute 'conn' must be of type 'ldap3.Connection'")
fetch_ldap_groups = None
ldap_object_class = None
ldap_group_members_attribute = None
if self.ldap_directory_server.lower() == "active directory":
ldap_object_class = "Group"
ldap_group_members_attribute = "member"
user_search_str = user.entry_dn
elif self.ldap_directory_server.lower() == "openldap":
ldap_object_class = "posixgroup"
ldap_group_members_attribute = "memberuid"
user_search_str = getattr(user, self.ldap_username_field).value
elif self.ldap_directory_server.lower() == "custom":
ldap_object_class = self.ldap_group_objectclass
ldap_group_members_attribute = self.ldap_group_member_attribute
ldap_custom_group_search = self.ldap_custom_group_search or "{0}"
@ -245,39 +246,31 @@ class LDAPSettings(Document):
# this path will be hit for everyone with preconfigured ldap settings. this must be taken into account so as not to break ldap for those users.
if self.ldap_group_field:
fetch_ldap_groups = getattr(user, self.ldap_group_field).values
if ldap_object_class is not None:
conn.search(
search_base=self.ldap_search_path_group,
search_filter="(&(objectClass={})({}={}))".format(
ldap_object_class, ldap_group_members_attribute, user_search_str
),
search_filter=f"(&(objectClass={ldap_object_class})({ldap_group_members_attribute}={user_search_str}))",
attributes=["cn"],
) # Build search query
if len(conn.entries) >= 1:
fetch_ldap_groups = []
for group in conn.entries:
fetch_ldap_groups.append(group["cn"].value)
return fetch_ldap_groups
def authenticate(self, username, password):
def authenticate(self, username: str, password: str):
if not self.enabled:
frappe.throw(_("LDAP is not enabled."))
user_filter = self.ldap_search_string.format(username)
ldap_attributes = self.get_ldap_attributes()
conn = self.connect_to_ldap(self.base_dn, self.get_password(raise_exception=False))
try:
import ldap3
conn.search(
search_base=self.ldap_search_path_user,
search_filter=f"{user_filter}",
@ -286,26 +279,21 @@ class LDAPSettings(Document):
if len(conn.entries) == 1 and conn.entries[0]:
user = conn.entries[0]
groups = self.fetch_ldap_groups(user, conn)
# only try and connect as the user, once we have their fqdn entry.
if user.entry_dn and password and conn.rebind(user=user.entry_dn, password=password):
return self.create_or_update_user(self.convert_ldap_entry_to_dict(user), groups=groups)
raise ldap3.core.exceptions.LDAPInvalidCredentialsResult # even though nothing foundor failed authentication raise invalid credentials
raise LDAPInvalidCredentialsResult # even though nothing foundor failed authentication raise invalid credentials
except ldap3.core.exceptions.LDAPInvalidFilterError:
except LDAPInvalidFilterError:
frappe.throw(_("Please use a valid LDAP search filter"), title=_("Misconfigured"))
except ldap3.core.exceptions.LDAPInvalidCredentialsResult:
except LDAPInvalidCredentialsResult:
frappe.throw(_("Invalid username or password"))
def reset_password(self, user, password, logout_sessions=False):
from ldap3 import HASHED_SALTED_SHA, MODIFY_REPLACE
from ldap3.utils.hashed import hashed
search_filter = f"({self.ldap_email_field}={user})"
conn = self.connect_to_ldap(
@ -334,8 +322,7 @@ class LDAPSettings(Document):
else:
frappe.throw(_("No LDAP User found for email: {0}").format(user))
def convert_ldap_entry_to_dict(self, user_entry):
def convert_ldap_entry_to_dict(self, user_entry: Entry):
# support multiple email values
email = user_entry[self.ldap_email_field]
@ -346,7 +333,6 @@ class LDAPSettings(Document):
}
# optional fields
if self.ldap_middle_name_field:
data["middle_name"] = user_entry[self.ldap_middle_name_field].value
@ -366,7 +352,7 @@ class LDAPSettings(Document):
def login():
# LDAP LOGIN LOGIC
args = frappe.form_dict
ldap = frappe.get_doc("LDAP Settings")
ldap: LDAPSettings = frappe.get_doc("LDAP Settings")
user = ldap.authenticate(frappe.as_unicode(args.usr), frappe.as_unicode(args.pwd))
@ -383,7 +369,7 @@ def login():
@frappe.whitelist()
def reset_password(user, password, logout):
ldap = frappe.get_doc("LDAP Settings")
ldap: LDAPSettings = frappe.get_doc("LDAP Settings")
if not ldap.enabled:
frappe.throw(_("LDAP is not enabled."))
ldap.reset_password(user, password, logout_sessions=int(logout))

View file

@ -1,15 +1,16 @@
# Copyright (c) 2019, Frappe Technologies and Contributors
# Copyright (c) 2022, Frappe Technologies and Contributors
# License: MIT. See LICENSE
import contextlib
import functools
import os
import ssl
import unittest
from unittest import mock
from unittest import TestCase, mock
import ldap3
from ldap3 import MOCK_SYNC, OFFLINE_AD_2012_R2, OFFLINE_SLAPD_2_4, Connection, Server
import frappe
from frappe.exceptions import MandatoryError, ValidationError
from frappe.integrations.doctype.ldap_settings.ldap_settings import LDAPSettings
@ -22,15 +23,19 @@ class LDAP_TestCase:
LDAP_LDIF_JSON = None
TEST_VALUES_LDAP_COMPLEX_SEARCH_STRING = None
# for adding type hints during development ^_^
assertTrue = TestCase.assertTrue
assertEqual = TestCase.assertEqual
assertIn = TestCase.assertIn
def mock_ldap_connection(f):
@functools.wraps(f)
def wrapped(self, *args, **kwargs):
with mock.patch(
"frappe.integrations.doctype.ldap_settings.ldap_settings.LDAPSettings.connect_to_ldap"
) as mock_connection:
mock_connection.return_value = self.connection
"frappe.integrations.doctype.ldap_settings.ldap_settings.LDAPSettings.connect_to_ldap",
return_value=self.connection,
):
self.test_class = LDAPSettings(self.doc)
# Create a clean doc
@ -47,80 +52,66 @@ class LDAP_TestCase:
return wrapped
def clean_test_users():
try: # clean up test user 1
with contextlib.suppress(Exception):
frappe.get_doc("User", "posix.user1@unit.testing").delete()
except Exception:
pass
try: # clean up test user 2
with contextlib.suppress(Exception):
frappe.get_doc("User", "posix.user2@unit.testing").delete()
except Exception:
pass
with contextlib.suppress(Exception):
frappe.get_doc("User", "website_ldap_user@test.com").delete()
@classmethod
def setUpClass(self, ldapServer="OpenLDAP"):
self.clean_test_users()
def setUpClass(cls):
cls.clean_test_users()
# Save user data for restoration in tearDownClass()
self.user_ldap_settings = frappe.get_doc("LDAP Settings")
cls.user_ldap_settings = frappe.get_doc("LDAP Settings")
# Create test user1
self.user1doc = {
cls.user1doc = {
"username": "posix.user",
"email": "posix.user1@unit.testing",
"first_name": "posix",
"doctype": "User",
"send_welcome_email": 0,
"language": "",
"user_type": "System User",
}
self.user1doc.update(
{
"doctype": "User",
"send_welcome_email": 0,
"language": "",
"user_type": "System User",
}
)
user = frappe.get_doc(self.user1doc)
user = frappe.get_doc(cls.user1doc)
user.insert(ignore_permissions=True)
# Create test user1
self.user2doc = {
cls.user2doc = {
"username": "posix.user2",
"email": "posix.user2@unit.testing",
"first_name": "posix",
"doctype": "User",
"send_welcome_email": 0,
"language": "",
"user_type": "System User",
}
self.user2doc.update(
{
"doctype": "User",
"send_welcome_email": 0,
"language": "",
"user_type": "System User",
}
)
user = frappe.get_doc(self.user2doc)
user = frappe.get_doc(cls.user2doc)
user.insert(ignore_permissions=True)
# Setup Mock OpenLDAP Directory
self.ldap_dc_path = "dc=unit,dc=testing"
self.ldap_user_path = "ou=users," + self.ldap_dc_path
self.ldap_group_path = "ou=groups," + self.ldap_dc_path
self.base_dn = "cn=base_dn_user," + self.ldap_dc_path
self.base_password = "my_password"
self.ldap_server = "ldap://my_fake_server:389"
cls.ldap_dc_path = "dc=unit,dc=testing"
cls.ldap_user_path = f"ou=users,{cls.ldap_dc_path}"
cls.ldap_group_path = f"ou=groups,{cls.ldap_dc_path}"
cls.base_dn = f"cn=base_dn_user,{cls.ldap_dc_path}"
cls.base_password = "my_password"
cls.ldap_server = "ldap://my_fake_server:389"
self.doc = {
cls.doc = {
"doctype": "LDAP Settings",
"enabled": True,
"ldap_directory_server": self.TEST_LDAP_SERVER,
"ldap_server_url": self.ldap_server,
"base_dn": self.base_dn,
"password": self.base_password,
"ldap_search_path_user": self.ldap_user_path,
"ldap_search_string": self.TEST_LDAP_SEARCH_STRING,
"ldap_search_path_group": self.ldap_group_path,
"ldap_directory_server": cls.TEST_LDAP_SERVER,
"ldap_server_url": cls.ldap_server,
"base_dn": cls.base_dn,
"password": cls.base_password,
"ldap_search_path_user": cls.ldap_user_path,
"ldap_search_string": cls.TEST_LDAP_SEARCH_STRING,
"ldap_search_path_group": cls.ldap_group_path,
"ldap_user_creation_and_mapping_section": "",
"ldap_email_field": "mail",
"ldap_username_field": self.LDAP_USERNAME_FIELD,
"ldap_username_field": cls.LDAP_USERNAME_FIELD,
"ldap_first_name_field": "givenname",
"ldap_middle_name_field": "",
"ldap_last_name_field": "sn",
@ -135,50 +126,41 @@ class LDAP_TestCase:
"ldap_group_objectclass": "",
"ldap_group_member_attribute": "",
"default_role": "Newsletter Manager",
"ldap_groups": self.DOCUMENT_GROUP_MAPPINGS,
"ldap_groups": cls.DOCUMENT_GROUP_MAPPINGS,
"ldap_group_field": "",
"default_user_type": "System User",
}
self.server = Server(host=self.ldap_server, port=389, get_info=self.LDAP_SCHEMA)
self.connection = Connection(
self.server,
user=self.base_dn,
password=self.base_password,
cls.server = Server(host=cls.ldap_server, port=389, get_info=cls.LDAP_SCHEMA)
cls.connection = Connection(
cls.server,
user=cls.base_dn,
password=cls.base_password,
read_only=True,
client_strategy=MOCK_SYNC,
)
self.connection.strategy.entries_from_json(
os.path.abspath(os.path.dirname(__file__)) + "/" + self.LDAP_LDIF_JSON
cls.connection.strategy.entries_from_json(
f"{os.path.abspath(os.path.dirname(__file__))}/{cls.LDAP_LDIF_JSON}"
)
self.connection.bind()
cls.connection.bind()
@classmethod
def tearDownClass(self):
try:
def tearDownClass(cls):
with contextlib.suppress(Exception):
frappe.get_doc("LDAP Settings").delete()
except Exception:
pass
try:
# return doc back to user data
self.user_ldap_settings.save()
except Exception:
pass
# return doc back to user data
with contextlib.suppress(Exception):
cls.user_ldap_settings.save()
# Clean-up test users
self.clean_test_users()
cls.clean_test_users()
# Clear OpenLDAP connection
self.connection = None
cls.connection = None
@mock_ldap_connection
def test_mandatory_fields(self):
mandatory_fields = [
"ldap_server_url",
"ldap_directory_server",
@ -195,26 +177,14 @@ class LDAP_TestCase:
] # fields that are required to have ldap functioning need to be mandatory
for mandatory_field in mandatory_fields:
localdoc = self.doc.copy()
localdoc[mandatory_field] = ""
try:
with contextlib.suppress(MandatoryError, ValidationError):
frappe.get_doc(localdoc).save()
self.fail(f"Document LDAP Settings field [{mandatory_field}] is not mandatory")
except frappe.exceptions.MandatoryError:
pass
except frappe.exceptions.ValidationError:
if mandatory_field == "ldap_search_string":
# additional validation is done on this field, pass in this instance
pass
for non_mandatory_field in self.doc: # Ensure remaining fields have not been made mandatory
if non_mandatory_field == "doctype" or non_mandatory_field in mandatory_fields:
continue
@ -222,15 +192,12 @@ class LDAP_TestCase:
localdoc[non_mandatory_field] = ""
try:
frappe.get_doc(localdoc).save()
except frappe.exceptions.MandatoryError:
except MandatoryError:
self.fail(f"Document LDAP Settings field [{non_mandatory_field}] should not be mandatory")
@mock_ldap_connection
def test_validation_ldap_search_string(self):
invalid_ldap_search_strings = [
"",
"uid={0}",
@ -242,19 +209,26 @@ class LDAP_TestCase:
] # ldap search string must be enclosed in '()' for ldap search to work for finding user and have the same number of opening and closing brackets.
for invalid_search_string in invalid_ldap_search_strings:
localdoc = self.doc.copy()
localdoc["ldap_search_string"] = invalid_search_string
try:
with contextlib.suppress(ValidationError):
frappe.get_doc(localdoc).save()
self.fail(f"LDAP search string [{invalid_search_string}] should not validate")
except frappe.exceptions.ValidationError:
pass
def test_connect_to_ldap(self):
		# prevent these parameters, either for security or because the end user has no way to configure them
prevent_connection_parameters = {
"mode": {
"IP_V4_ONLY": "Locks the user to IPv4 without frappe providing a way to configure",
"IP_V6_ONLY": "Locks the user to IPv6 without frappe providing a way to configure",
},
"auto_bind": {
"NONE": "ldap3.Connection must autobind with base_dn",
"NO_TLS": "ldap3.Connection must have TLS",
"TLS_AFTER_BIND": "[Security] ldap3.Connection TLS bind must occur before bind",
},
}
		# set up a clean doc with ldap disabled so no validation occurs (this is tested separately)
local_doc = self.doc.copy()
@ -262,48 +236,25 @@ class LDAP_TestCase:
self.test_class = LDAPSettings(self.doc)
with mock.patch("ldap3.Server") as ldap3_server_method:
with mock.patch("ldap3.Connection") as ldap3_connection_method:
ldap3_connection_method.return_value = self.connection
with mock.patch("ldap3.Connection", return_value=self.connection) as ldap3_connection_method:
with mock.patch("ldap3.Tls") as ldap3_Tls_method:
function_return = self.test_class.connect_to_ldap(
base_dn=self.base_dn, password=self.base_password
)
args, kwargs = ldap3_connection_method.call_args
prevent_connection_parameters = {
# prevent these parameters for security or lack of the und user from being able to configure
"mode": {
"IP_V4_ONLY": "Locks the user to IPv4 without frappe providing a way to configure",
"IP_V6_ONLY": "Locks the user to IPv6 without frappe providing a way to configure",
},
"auto_bind": {
"NONE": "ldap3.Connection must autobind with base_dn",
"NO_TLS": "ldap3.Connection must have TLS",
"TLS_AFTER_BIND": "[Security] ldap3.Connection TLS bind must occur before bind",
},
}
for connection_arg in kwargs:
if (
connection_arg in prevent_connection_parameters
and kwargs[connection_arg] in prevent_connection_parameters[connection_arg]
):
self.fail(
"ldap3.Connection was called with {}, failed reason: [{}]".format(
kwargs[connection_arg],
prevent_connection_parameters[connection_arg][kwargs[connection_arg]],
)
f"ldap3.Connection was called with {kwargs[connection_arg]}, failed reason: [{prevent_connection_parameters[connection_arg][kwargs[connection_arg]]}]"
)
tls_version = ssl.PROTOCOL_TLS_CLIENT
if local_doc["require_trusted_certificate"] == "Yes":
tls_validate = ssl.CERT_REQUIRED
tls_version = ssl.PROTOCOL_TLS_CLIENT
tls_configuration = ldap3.Tls(validate=tls_validate, version=tls_version)
self.assertTrue(
@ -313,7 +264,6 @@ class LDAP_TestCase:
else:
tls_validate = ssl.CERT_NONE
tls_version = ssl.PROTOCOL_TLS_CLIENT
tls_configuration = ldap3.Tls(validate=tls_validate, version=tls_version)
self.assertTrue(kwargs["auto_bind"], "ldap3.Connection must autobind")
@ -347,7 +297,7 @@ class LDAP_TestCase:
)
self.assertTrue(
type(function_return) is ldap3.core.connection.Connection,
type(function_return) is Connection,
"The return type must be of ldap3.Connection",
)
@ -364,24 +314,20 @@ class LDAP_TestCase:
@mock_ldap_connection
def test_get_ldap_client_settings(self):
result = self.test_class.get_ldap_client_settings()
self.assertIsInstance(result, dict)
self.assertTrue(result["enabled"] == self.doc["enabled"]) # settings should match doc
localdoc = self.doc.copy()
localdoc["enabled"] = False
frappe.get_doc(localdoc).save()
result = self.test_class.get_ldap_client_settings()
self.assertFalse(result["enabled"]) # must match the edited doc
@mock_ldap_connection
def test_update_user_fields(self):
test_user_data = {
"username": "posix.user",
"email": "posix.user1@unit.testing",
@ -391,11 +337,8 @@ class LDAP_TestCase:
"phone": "08 1234 5678",
"mobile_no": "0421 123 456",
}
test_user = frappe.get_doc("User", test_user_data["email"])
self.test_class.update_user_fields(test_user, test_user_data)
updated_user = frappe.get_doc("User", test_user_data["email"])
self.assertTrue(updated_user.middle_name == test_user_data["middle_name"])
@ -403,9 +346,23 @@ class LDAP_TestCase:
self.assertTrue(updated_user.phone == test_user_data["phone"])
self.assertTrue(updated_user.mobile_no == test_user_data["mobile_no"])
self.assertEqual(updated_user.user_type, self.test_class.default_user_type)
self.assertIn(self.test_class.default_role, frappe.get_roles(updated_user.name))
@mock_ldap_connection
def test_create_website_user(self):
new_test_user_data = {
"username": "website_ldap_user.test",
"email": "website_ldap_user@test.com",
"first_name": "Website User - LDAP Test",
}
self.test_class.default_user_type = "Website User"
self.test_class.create_or_update_user(user_data=new_test_user_data, groups=[])
new_user = frappe.get_doc("User", new_test_user_data["email"])
self.assertEqual(new_user.user_type, "Website User")
@mock_ldap_connection
def test_sync_roles(self):
if self.TEST_LDAP_SERVER.lower() == "openldap":
test_user_data = {
"posix.user1": [
@ -457,9 +414,8 @@ class LDAP_TestCase:
user.insert(ignore_permissions=True)
for test_user in test_user_data:
test_user_doc = frappe.get_doc("User", test_user + "@unit.testing")
test_user_roles = frappe.get_roles(test_user + "@unit.testing")
test_user_doc = frappe.get_doc("User", f"{test_user}@unit.testing")
test_user_roles = frappe.get_roles(f"{test_user}@unit.testing")
self.assertTrue(
len(test_user_roles) == 2, "User should only be a part of the All and Guest roles"
@ -467,28 +423,22 @@ class LDAP_TestCase:
self.test_class.sync_roles(test_user_doc, test_user_data[test_user]) # update user roles
frappe.get_doc("User", test_user + "@unit.testing")
updated_user_roles = frappe.get_roles(test_user + "@unit.testing")
frappe.get_doc("User", f"{test_user}@unit.testing")
updated_user_roles = frappe.get_roles(f"{test_user}@unit.testing")
self.assertTrue(
len(updated_user_roles) == len(test_user_data[test_user]),
"syncing of the user roles failed. {} != {} for user {}".format(
len(updated_user_roles), len(test_user_data[test_user]), test_user
),
f"syncing of the user roles failed. {len(updated_user_roles)} != {len(test_user_data[test_user])} for user {test_user}",
)
for user_role in updated_user_roles: # match each users role mapped to ldap groups
self.assertTrue(
role_to_group_map[user_role] in test_user_data[test_user],
"during sync_roles(), the user was given role {} which should not have occured".format(
user_role
),
f"during sync_roles(), the user was given role {user_role} which should not have occured",
)
@mock_ldap_connection
def test_create_or_update_user(self):
test_user_data = {
"posix.user1": [
"Users",
@ -498,28 +448,21 @@ class LDAP_TestCase:
"frappe_default_guest",
],
}
test_user = "posix.user1"
frappe.get_doc("User", test_user + "@unit.testing").delete() # remove user 1
frappe.get_doc("User", f"{test_user}@unit.testing").delete()
with self.assertRaises(
frappe.exceptions.DoesNotExistError
): # ensure user deleted so function can be tested
frappe.get_doc("User", test_user + "@unit.testing")
frappe.get_doc("User", f"{test_user}@unit.testing")
with mock.patch(
"frappe.integrations.doctype.ldap_settings.ldap_settings.LDAPSettings.update_user_fields"
) as update_user_fields_method:
update_user_fields_method.return_value = None
with mock.patch(
"frappe.integrations.doctype.ldap_settings.ldap_settings.LDAPSettings.sync_roles"
) as sync_roles_method:
sync_roles_method.return_value = None
# New user
self.test_class.create_or_update_user(self.user1doc, test_user_data[test_user])
@ -539,14 +482,11 @@ class LDAP_TestCase:
@mock_ldap_connection
def test_get_ldap_attributes(self):
method_return = self.test_class.get_ldap_attributes()
self.assertTrue(type(method_return) is list)
@mock_ldap_connection
def test_fetch_ldap_groups(self):
if self.TEST_LDAP_SERVER.lower() == "openldap":
test_users = {"posix.user": ["Users", "Administrators"], "posix.user2": ["Users", "Group3"]}
elif self.TEST_LDAP_SERVER.lower() == "active directory":
@ -556,7 +496,6 @@ class LDAP_TestCase:
}
for test_user in test_users:
self.connection.search(
search_base=self.ldap_user_path,
search_filter=self.TEST_LDAP_SEARCH_STRING.format(test_user),
@ -569,18 +508,13 @@ class LDAP_TestCase:
self.assertTrue(len(method_return) == len(test_users[test_user]))
for returned_group in method_return:
self.assertTrue(returned_group in test_users[test_user])
@mock_ldap_connection
def test_authenticate(self):
with mock.patch(
"frappe.integrations.doctype.ldap_settings.ldap_settings.LDAPSettings.fetch_ldap_groups"
) as fetch_ldap_groups_function:
fetch_ldap_groups_function.return_value = None
self.assertTrue(self.test_class.authenticate("posix.user", "posix_user_password"))
self.assertTrue(
@ -599,25 +533,19 @@ class LDAP_TestCase:
] # All invalid users should return 'invalid username or password'
for username, password in enumerate(invalid_users):
with self.assertRaises(frappe.exceptions.ValidationError) as display_massage:
self.test_class.authenticate(username, password)
self.assertTrue(
str(display_massage.exception).lower() == "invalid username or password",
"invalid credentials passed authentication [user: {}, password: {}]".format(
username, password
),
f"invalid credentials passed authentication [user: {username}, password: {password}]",
)
@mock_ldap_connection
def test_complex_ldap_search_filter(self):
ldap_search_filters = self.TEST_VALUES_LDAP_COMPLEX_SEARCH_STRING
for search_filter in ldap_search_filters:
self.test_class.ldap_search_string = search_filter
if (
@ -634,55 +562,44 @@ class LDAP_TestCase:
self.assertTrue(self.test_class.authenticate("posix.user", "posix_user_password"))
def test_reset_password(self):
self.test_class = LDAPSettings(self.doc)
# Create a clean doc
localdoc = self.doc.copy()
localdoc["enabled"] = False
frappe.get_doc(localdoc).save()
with mock.patch(
"frappe.integrations.doctype.ldap_settings.ldap_settings.LDAPSettings.connect_to_ldap"
"frappe.integrations.doctype.ldap_settings.ldap_settings.LDAPSettings.connect_to_ldap",
return_value=self.connection,
) as connect_to_ldap:
connect_to_ldap.return_value = self.connection
with self.assertRaises(
frappe.exceptions.ValidationError
) as validation: # Fail if username string used
self.test_class.reset_password("posix.user", "posix_user_password")
self.assertTrue(str(validation.exception) == "No LDAP User found for email: posix.user")
try:
with contextlib.suppress(Exception):
self.test_class.reset_password(
"posix.user1@unit.testing", "posix_user_password"
) # Change Password
except Exception: # An exception from the tested class is ok, as long as the connection to LDAP was made writeable
pass
connect_to_ldap.assert_called_with(self.base_dn, self.base_password, read_only=False)
@mock_ldap_connection
def test_convert_ldap_entry_to_dict(self):
self.connection.search(
search_base=self.ldap_user_path,
search_filter=self.TEST_LDAP_SEARCH_STRING.format("posix.user"),
attributes=self.test_class.get_ldap_attributes(),
)
test_ldap_entry = self.connection.entries[0]
method_return = self.test_class.convert_ldap_entry_to_dict(test_ldap_entry)
self.assertTrue(type(method_return) is dict) # must be dict
self.assertTrue(len(method_return) == 6) # there are 6 fields in mock_ldap for use
class Test_OpenLDAP(LDAP_TestCase, unittest.TestCase):
class Test_OpenLDAP(LDAP_TestCase, TestCase):
TEST_LDAP_SERVER = "OpenLDAP"
TEST_LDAP_SEARCH_STRING = "(uid={0})"
DOCUMENT_GROUP_MAPPINGS = [
@ -706,7 +623,7 @@ class Test_OpenLDAP(LDAP_TestCase, unittest.TestCase):
]
class Test_ActiveDirectory(LDAP_TestCase, unittest.TestCase):
class Test_ActiveDirectory(LDAP_TestCase, TestCase):
TEST_LDAP_SERVER = "Active Directory"
TEST_LDAP_SEARCH_STRING = "(samaccountname={0})"
DOCUMENT_GROUP_MAPPINGS = [

View file

@ -14,7 +14,9 @@ def get_oauth_settings():
"""Returns oauth settings"""
out = frappe._dict(
{
"skip_authorization": frappe.db.get_value("OAuth Provider Settings", None, "skip_authorization")
"skip_authorization": frappe.db.get_single_value(
"OAuth Provider Settings", "skip_authorization"
)
}
)

View file

@ -76,8 +76,8 @@ def take_backups_monthly():
def take_backups_if(freq):
if cint(frappe.db.get_value("S3 Backup Settings", None, "enabled")):
if frappe.db.get_value("S3 Backup Settings", None, "frequency") == freq:
if cint(frappe.db.get_single_value("S3 Backup Settings", "enabled")):
if frappe.db.get_single_value("S3 Backup Settings", "frequency") == freq:
take_backups_s3()

View file

@ -0,0 +1,201 @@
import json
from google.oauth2.credentials import Credentials
from googleapiclient.discovery import build
from requests import get, post
import frappe
from frappe.utils import get_request_site_address
# All Google integrations share this single callback endpoint; the "domain"
# stored in the OAuth `state` payload decides which handler runs (see callback()).
CALLBACK_METHOD = "/api/method/frappe.integrations.google_oauth.callback"
# OAuth scopes requested per integration domain.
# NOTE(review): the parenthesized values below are plain strings, NOT 1-tuples
# (no trailing comma) — GoogleOAuth.__init__ handles both string and
# list/tuple forms, so this works, but don't mistake them for tuples.
_SCOPES = {
	"mail": ("https://mail.google.com/"),
	"contacts": ("https://www.googleapis.com/auth/contacts"),
	"drive": ("https://www.googleapis.com/auth/drive"),
	"indexing": ("https://www.googleapis.com/auth/indexing"),
}
# (serviceName, version) pairs passed to googleapiclient.discovery.build.
# Note: no entry for "mail" — only these three domains get a service object.
_SERVICES = {
	"contacts": ("people", "v1"),
	"drive": ("drive", "v3"),
	"indexing": ("indexing", "v3"),
}
# Dotted paths invoked via frappe.get_attr() by callback() once Google
# redirects back with an authorization code.
_DOMAIN_CALLBACK_METHODS = {
	"mail": "frappe.email.oauth.authorize_google_access",
	"contacts": "frappe.integrations.doctype.google_contacts.google_contacts.authorize_access",
	"drive": "frappe.integrations.doctype.google_drive.google_drive.authorize_access",
	"indexing": "frappe.website.doctype.website_settings.google_indexing.authorize_access",
}
class GoogleAuthenticationError(Exception):
	"""Raised when a Google OAuth token exchange or refresh fails."""
class GoogleOAuth:
	"""Helper around Google's OAuth 2.0 flow for one integration *domain*
	("mail", "contacts", "drive" or "indexing").

	Reads client credentials from the "Google Settings" single doctype and
	builds the authorization URL, token-exchange and token-refresh requests
	for that domain's scopes.
	"""

	# Google's common token endpoint (authorization-code and refresh-token grants).
	OAUTH_URL = "https://oauth2.googleapis.com/token"

	def __init__(self, domain: str, validate: bool = True):
		# Single doctype holding client_id / client_secret / enable flag.
		self.google_settings = frappe.get_single("Google Settings")
		self.domain = domain.lower()
		# _SCOPES values may be a single string or a list/tuple of scope URLs;
		# normalise to the space-separated string form the OAuth API expects.
		self.scopes = (
			" ".join(_SCOPES[self.domain])
			if isinstance(_SCOPES[self.domain], (list, tuple))
			else _SCOPES[self.domain]
		)
		if validate:
			self.validate_google_settings()

	def validate_google_settings(self):
		"""Throw (with a link to the settings page) unless the Google
		integration is enabled and both client credentials are set."""
		google_settings = "<a href='/app/google-settings'>Google Settings</a>"
		if not self.google_settings.enable:
			frappe.throw(frappe._("Please enable {} before continuing.").format(google_settings))
		if not (self.google_settings.client_id and self.google_settings.client_secret):
			frappe.throw(frappe._("Please update {} before continuing.").format(google_settings))

	def authorize(self, oauth_code: str) -> dict[str, str | int]:
		"""Exchange an authorization code for tokens; returns a dict with
		access and refresh token (empty dict on error, via handle_response).

		:param oauth_code: code got back from google upon successful authorization
		"""
		data = {
			"code": oauth_code,
			"client_id": self.google_settings.client_id,
			"client_secret": self.google_settings.get_password(
				fieldname="client_secret", raise_exception=False
			),
			"grant_type": "authorization_code",
			"scope": self.scopes,
			# must match the redirect URI registered with Google
			"redirect_uri": get_request_site_address(True) + CALLBACK_METHOD,
		}
		return handle_response(
			post(self.OAUTH_URL, data=data).json(),
			"Google Oauth Authorization Error",
			"Something went wrong during the authorization.",
		)

	def refresh_access_token(self, refresh_token: str) -> dict[str, str | int]:
		"""Refreshes google access token using refresh token"""
		data = {
			"client_id": self.google_settings.client_id,
			"client_secret": self.google_settings.get_password(
				fieldname="client_secret", raise_exception=False
			),
			"refresh_token": refresh_token,
			"grant_type": "refresh_token",
			"scope": self.scopes,
		}
		# raise_err=True: a failed refresh is surfaced to the caller as an
		# exception rather than an empty dict.
		return handle_response(
			post(self.OAUTH_URL, data=data).json(),
			"Google Oauth Access Token Refresh Error",
			"Something went wrong during the access token generation.",
			raise_err=True,
		)

	def get_authentication_url(self, state: dict[str, str]) -> dict[str, str]:
		"""Returns google authentication url (as {"url": ...}).

		:param state: dict of values which you need on callback (for calling methods, redirection back to the form, doc name, etc)
		"""
		# callback() reads "domain" back out of state to route to the right handler
		state.update({"domain": self.domain})
		state = json.dumps(state)
		callback_url = get_request_site_address(True) + CALLBACK_METHOD
		# NOTE(review): scopes/state are interpolated without URL-encoding —
		# presumably tolerated by Google's endpoint; confirm before changing.
		return {
			"url": "https://accounts.google.com/o/oauth2/v2/auth?"
			+ "access_type=offline&response_type=code&prompt=consent&include_granted_scopes=true&"
			+ "client_id={}&scope={}&redirect_uri={}&state={}".format(
				self.google_settings.client_id, self.scopes, callback_url, state
			)
		}

	def get_google_service_object(self, access_token: str, refresh_token: str):
		"""Returns google service object built via googleapiclient.discovery"""
		credentials_dict = {
			"token": access_token,
			"refresh_token": refresh_token,
			"token_uri": self.OAUTH_URL,
			"client_id": self.google_settings.client_id,
			"client_secret": self.google_settings.get_password(
				fieldname="client_secret", raise_exception=False
			),
			"scopes": self.scopes,
		}
		return build(
			serviceName=_SERVICES[self.domain][0],
			version=_SERVICES[self.domain][1],
			credentials=Credentials(**credentials_dict),
			# fetch the API discovery document at runtime
			static_discovery=False,
		)
def handle_response(
response: dict[str, str | int],
error_title: str,
error_message: str,
raise_err: bool = False,
):
if "error" in response:
frappe.log_error(
frappe._(error_title), frappe._(response.get("error_description", error_message))
)
if raise_err:
frappe.throw(frappe._(error_title), GoogleAuthenticationError, frappe._(error_message))
return {}
return response
def is_valid_access_token(access_token: str) -> bool:
	"""Ask Google's tokeninfo endpoint whether *access_token* is still valid.

	A response carrying an "error" key means the token is invalid/expired.
	"""
	response = get(
		"https://oauth2.googleapis.com/tokeninfo", params={"access_token": access_token}
	).json()
	return "error" not in response
@frappe.whitelist(methods=["GET"])
def callback(state: str, code: str = None, error: str = None) -> None:
	"""Common callback for google integrations.
	Invokes functions using `frappe.get_attr` and also adds required (keyworded) arguments
	along with committing and redirecting us back to frappe site.

	:param state: JSON-encoded dict created by GoogleOAuth.get_authentication_url;
		must contain "domain", may contain "redirect", "success_query_param",
		"failure_query_param" plus extra kwargs for the domain handler
	:param code: authorization code from Google (present on success)
	:param error: error string from Google (present when the user denied access etc.)
	"""
	state = json.loads(state)
	# Routing/redirect keys are popped so only handler kwargs remain in state.
	redirect = state.pop("redirect", "/app")
	success_query_param = state.pop("success_query_param", "")
	failure_query_param = state.pop("failure_query_param", "")
	if not error:
		if (domain := state.pop("domain")) in _DOMAIN_CALLBACK_METHODS:
			state.update({"code": code})
			# Dispatch to the domain-specific authorize_access handler with the
			# remaining state entries as keyword arguments.
			frappe.get_attr(_DOMAIN_CALLBACK_METHODS[domain])(**state)
			# GET request, hence using commit to persist changes
			frappe.db.commit()  # nosemgrep
		else:
			# Unknown domain: render an error page instead of redirecting.
			return frappe.respond_as_web_page(
				"Invalid Google Callback",
				"The callback domain provided is not valid for Google Authentication",
				http_status_code=400,
				indicator_color="red",
				width=640,
			)
	# Redirect back into the site; on error the failure query param is appended.
	frappe.local.response["type"] = "redirect"
	frappe.local.response[
		"location"
	] = f"{redirect}?{failure_query_param if error else success_query_param}"

View file

@ -13,6 +13,7 @@ import frappe.permissions
import frappe.share
from frappe import _
from frappe.core.doctype.server_script.server_script_utils import get_server_script_map
from frappe.database.utils import FallBackDateTimeStr
from frappe.model import optional_fields
from frappe.model.meta import get_table_columns
from frappe.model.utils.user_settings import get_user_settings, update_user_settings
@ -632,11 +633,11 @@ class DatabaseQuery:
date_range = get_date_range(f.operator.lower(), f.value)
f.operator = "Between"
f.value = date_range
fallback = "'0001-01-01 00:00:00'"
fallback = f"'{FallBackDateTimeStr}'"
if f.operator in (">", "<") and (f.fieldname in ("creation", "modified")):
value = cstr(f.value)
fallback = "'0001-01-01 00:00:00'"
fallback = f"'{FallBackDateTimeStr}'"
elif f.operator.lower() in ("between") and (
f.fieldname in ("creation", "modified")
@ -644,7 +645,7 @@ class DatabaseQuery:
):
value = get_between_date_filter(f.value, df)
fallback = "'0001-01-01 00:00:00'"
fallback = f"'{FallBackDateTimeStr}'"
elif f.operator.lower() == "is":
if f.value == "set":
@ -665,7 +666,7 @@ class DatabaseQuery:
elif (df and df.fieldtype == "Datetime") or isinstance(f.value, datetime):
value = frappe.db.format_datetime(f.value)
fallback = "'0001-01-01 00:00:00'"
fallback = f"'{FallBackDateTimeStr}'"
elif df and df.fieldtype == "Time":
value = get_time(f.value).strftime("%H:%M:%S.%f")
@ -719,7 +720,7 @@ class DatabaseQuery:
return condition
def build_match_conditions(self, as_condition=True):
def build_match_conditions(self, as_condition=True) -> str | list:
"""add match conditions if applicable"""
self.match_filters = []
self.match_conditions = []

View file

@ -1092,7 +1092,9 @@ class Document(BaseDocument):
self.run_method("on_update_after_submit")
self.clear_cache()
self.notify_update()
if self.flags.get("notify_update", True):
self.notify_update()
update_global_search(self)
@ -1145,7 +1147,7 @@ class Document(BaseDocument):
:param fieldname: fieldname of the property to be updated, or a {"field":"value"} dictionary
:param value: value of the property to be updated
:param update_modified: default True. updates the `modified` and `modified_by` properties
:param notify: default False. run doc.notify_updated() to send updates via socketio
:param notify: default False. run doc.notify_update() to send updates via socketio
:param commit: default False. run frappe.db.commit()
"""
if isinstance(fieldname, dict):

View file

@ -1,6 +1,7 @@
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: MIT. See LICENSE
import datetime
import re
from typing import TYPE_CHECKING, Callable, Optional
@ -23,6 +24,17 @@ NAMING_SERIES_PATTERN = re.compile(r"^[\w\- \/.#{}]+$", re.UNICODE)
BRACED_PARAMS_PATTERN = re.compile(r"(\{[\w | #]+\})")
# Types that can be using in naming series fields
NAMING_SERIES_PART_TYPES = (
int,
str,
datetime.datetime,
datetime.date,
datetime.time,
datetime.timedelta,
)
class InvalidNamingSeriesError(frappe.ValidationError):
pass
@ -298,6 +310,9 @@ def parse_naming_series(
series_set = False
today = now_datetime()
for e in parts:
if not e:
continue
part = ""
if e.startswith("#"):
if not series_set:
@ -320,14 +335,16 @@ def parse_naming_series(
part = frappe.defaults.get_user_default("fiscal_year")
elif e.startswith("{") and doc:
e = e.replace("{", "").replace("}", "")
part = (cstr(doc.get(e)) or "").strip()
part = doc.get(e)
elif doc and doc.get(e):
part = (cstr(doc.get(e)) or "").strip()
part = doc.get(e)
else:
part = e
if isinstance(part, str):
name += part
elif isinstance(part, NAMING_SERIES_PART_TYPES):
name += cstr(part).strip()
return name

View file

@ -1,5 +1,6 @@
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and Contributors
# License: MIT. See LICENSE
from types import NoneType
from typing import TYPE_CHECKING
import frappe
@ -46,7 +47,7 @@ def update_document_title(
# TODO: omit this after runtime type checking (ref: https://github.com/frappe/frappe/pull/14927)
for obj in [docname, updated_title, updated_name]:
if not isinstance(obj, (str, type(None))):
if not isinstance(obj, (str, NoneType)):
frappe.throw(f"{obj=} must be of type str or None")
# handle bad API usages

View file

@ -183,6 +183,8 @@ frappe.patches.v13_0.queryreport_columns
frappe.patches.v13_0.jinja_hook
frappe.patches.v13_0.update_notification_channel_if_empty
frappe.patches.v13_0.set_first_day_of_the_week
frappe.patches.v13_0.encrypt_2fa_secrets
frappe.patches.v13_0.reset_corrupt_defaults
execute:frappe.reload_doc('custom', 'doctype', 'custom_field')
frappe.patches.v14_0.update_workspace2 # 20.09.2021
frappe.patches.v14_0.save_ratings_in_fraction #23-12-2021
@ -192,6 +194,8 @@ frappe.patches.v14_0.reset_creation_datetime
frappe.patches.v14_0.remove_is_first_startup
frappe.patches.v14_0.clear_long_pending_stale_logs
frappe.patches.v14_0.log_settings_migration
frappe.patches.v14_0.setup_likes_from_feedback
frappe.patches.v14_0.update_webforms
[post_model_sync]
frappe.patches.v14_0.drop_data_import_legacy
@ -204,3 +208,4 @@ frappe.patches.v14_0.update_auto_account_deletion_duration
frappe.patches.v14_0.update_integration_request
frappe.patches.v14_0.set_document_expiry_default
frappe.patches.v14_0.delete_data_migration_tool
frappe.patches.v14_0.set_suspend_email_queue_default

View file

@ -4,6 +4,6 @@ from frappe.utils import cint
def execute():
frappe.reload_doctype("Dropbox Settings")
check_dropbox_enabled = cint(frappe.db.get_value("Dropbox Settings", None, "enabled"))
check_dropbox_enabled = cint(frappe.db.get_single_value("Dropbox Settings", "enabled"))
if check_dropbox_enabled == 1:
frappe.db.set_value("Dropbox Settings", None, "file_backup", 1)

View file

@ -1,5 +1,3 @@
from pymysql import InternalError
import frappe
# This patch deletes all the duplicate indexes created for same column
@ -51,5 +49,5 @@ def execute():
for query in query_list:
try:
frappe.db.sql(query)
except InternalError:
except frappe.db.InternalError:
pass

Some files were not shown because too many files have changed in this diff Show more