
Merge branch 'main' into main

commit aef66a5822 by Irina Truong, 2023-10-06 16:06:01 -07:00 (committed by GitHub)
20 changed files with 213 additions and 61 deletions

View File

@ -72,7 +72,7 @@ jobs:
pip install keyrings.alt>=3.1
- name: Run unit tests
run: coverage run --source pgcli -m py.test
run: coverage run --source pgcli -m pytest
- name: Run integration tests
env:

View File

@ -126,6 +126,8 @@ Contributors:
* Rigo Neri (rigoneri)
* Anna Glasgall (annathyst)
* Andy Schoenberger (andyscho)
* Damien Baty (dbaty)
* blag
* Rob Berry (rob-b)
Creator:

View File

@ -157,8 +157,9 @@ get this running in a development setup.
https://github.com/dbcli/pgcli/blob/master/DEVELOP.rst
Please feel free to reach out to me if you need help.
My email: amjith.r@gmail.com, Twitter: `@amjithr <http://twitter.com/amjithr>`_
Please feel free to reach out to us if you need help.
* Amjith, pgcli author: amjith.r@gmail.com, Twitter: `@amjithr <http://twitter.com/amjithr>`_
* Irina, pgcli maintainer: i.chernyavska@gmail.com, Twitter: `@irinatruong <http://twitter.com/irinatruong>`_
Detailed Installation Instructions:
-----------------------------------

View File

@ -1,9 +1,13 @@
========
Upcoming
========
Features:
---------
* Ask for confirmation when quitting the cli while a transaction is ongoing.
* New `destructive_statements_require_transaction` config option to refuse to execute a
destructive SQL statement if outside a transaction. This option is off by default.
* Changed the `destructive_warning` config to be a list of commands that are considered
destructive. This would allow you to be warned on `create`, `grant`, or `insert` queries.
* Destructive warnings will now include the alias dsn connection string name if provided (-D option).
@ -12,12 +16,22 @@ Features:
Also prevents getting stuck in a retry loop.
* Config option to not restart the connection when cancelling a `destructive_warning` query. By default,
the connection is now not restarted (see the config sketch at the end of this changelog entry).
* Config option to always run with a single connection.
* Add comment explaining default LESS environment variable behavior and change example pager setting.
* Added \echo & \qecho special commands ([issue 1335](https://github.com/dbcli/pgcli/issues/1335)).
Bug fixes:
----------
* Fix \ev not producing a correctly quoted "schema"."view"
* Fix 'invalid connection option "dsn"' ([issue 1373](https://github.com/dbcli/pgcli/issues/1373)).
* Fix explain mode when used with `expand`, `auto_expand`, or `--explain-vertical-output` ([issue 1393](https://github.com/dbcli/pgcli/issues/1393)).
* Fix sql-insert format emitting NULL as 'None' ([issue 1408](https://github.com/dbcli/pgcli/issues/1408)).
* Improve check for prompt-toolkit 3.0.6 ([issue 1416](https://github.com/dbcli/pgcli/issues/1416)).
* Allow specifying an `alias_map_file` in the config that will use
predetermined table aliases instead of generating aliases programmatically on
the fly
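
All of the options above live in the `[main]` section of the pgclirc config file (the same file updated later in this diff). A minimal sketch of how the new settings might be combined; the values are illustrative choices, and the `alias_map_file` path and its expected file contents are assumptions not confirmed by this diff:

    [main]
    # List of statement types that trigger a destructive warning.
    destructive_warning = drop, truncate, delete, grant
    # Refuse to run the statements above unless a transaction is already open.
    destructive_statements_require_transaction = True
    # Keep the current connection when the user declines a destructive statement.
    destructive_warning_restarts_connection = False
    # Equivalent to always running with the --single-connection flag.
    always_use_single_connection = True
    # Assumed path; the expected file format is not shown in this diff.
    # alias_map_file = ~/.config/pgcli/alias_map

With these settings, a destructive statement such as `DROP TABLE users;` typed outside a transaction is refused outright, while the same statement inside an open transaction still prompts for confirmation.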
3.5.0 (2022/09/15):
===================

View File

@ -26,7 +26,9 @@ def keyring_initialize(keyring_enabled, *, logger):
try:
keyring = importlib.import_module("keyring")
except Exception as e: # ImportError for Python 2, ModuleNotFoundError for Python 3
except (
ModuleNotFoundError
) as e: # ImportError for Python 2, ModuleNotFoundError for Python 3
logger.warning("import keyring failed: %r.", e)

View File

@ -6,7 +6,6 @@ from .pgcompleter import PGCompleter
class CompletionRefresher:
refreshers = OrderedDict()
def __init__(self):

View File

@ -10,7 +10,8 @@ class ExplainOutputFormatter:
self.max_width = max_width
def format_output(self, cur, headers, **output_kwargs):
(data,) = cur.fetchone()
# explain query results should always contain 1 row each
[(data,)] = list(cur)
explain_list = json.loads(data)
visualizer = Visualizer(self.max_width)
for explain in explain_list:

View File

@ -64,15 +64,13 @@ from .config import (
from .key_bindings import pgcli_bindings
from .packages.formatter.sqlformatter import register_new_formatter
from .packages.prompt_utils import confirm, confirm_destructive_query
from .packages.parseutils import is_destructive
from .packages.parseutils import parse_destructive_warning
from .__init__ import __version__
click.disable_unicode_literals_warning = True
try:
from urlparse import urlparse, unquote, parse_qs
except ImportError:
from urllib.parse import urlparse, unquote, parse_qs
from urllib.parse import urlparse
from getpass import getuser
@ -234,6 +232,9 @@ class PGCli:
self.destructive_warning_restarts_connection = c["main"].as_bool(
"destructive_warning_restarts_connection"
)
self.destructive_statements_require_transaction = c["main"].as_bool(
"destructive_statements_require_transaction"
)
self.less_chatty = bool(less_chatty) or c["main"].as_bool("less_chatty")
self.null_string = c["main"].get("null_string", "<null>")
@ -264,6 +265,9 @@ class PGCli:
# Initialize completer
smart_completion = c["main"].as_bool("smart_completion")
keyword_casing = c["main"]["keyword_casing"]
single_connection = single_connection or c["main"].as_bool(
"always_use_single_connection"
)
self.settings = {
"casing_file": get_casing_file(c),
"generate_casing_file": c["main"].as_bool("generate_casing_file"),
@ -299,7 +303,6 @@ class PGCli:
raise PgCliQuitError
def register_special_commands(self):
self.pgspecial.register(
self.change_db,
"\\c",
@ -361,6 +364,23 @@ class PGCli:
"Change the table format used to output results",
)
self.pgspecial.register(
self.echo,
"\\echo",
"\\echo [string]",
"Echo a string to stdout",
)
self.pgspecial.register(
self.echo,
"\\qecho",
"\\qecho [string]",
"Echo a string to the query output channel.",
)
def echo(self, pattern, **_):
return [(None, None, None, pattern)]
def change_table_format(self, pattern, **_):
try:
if pattern not in TabularOutputFormatter().supported_formats:
@ -430,15 +450,20 @@ class PGCli:
except OSError as e:
return [(None, None, None, str(e), "", False, True)]
if (
self.destructive_warning
and confirm_destructive_query(
if self.destructive_warning:
if (
self.destructive_statements_require_transaction
and not self.pgexecute.valid_transaction()
and is_destructive(query, self.destructive_warning)
):
message = "Destructive statements must be run within a transaction. Command execution stopped."
return [(None, None, None, message)]
destroy = confirm_destructive_query(
query, self.destructive_warning, self.dsn_alias
)
is False
):
message = "Wise choice. Command execution stopped."
return [(None, None, None, message)]
if destroy is False:
message = "Wise choice. Command execution stopped."
return [(None, None, None, message)]
on_error_resume = self.on_error == "RESUME"
return self.pgexecute.run(
@ -466,7 +491,6 @@ class PGCli:
return [(None, None, None, message, "", True, True)]
def initialize_logging(self):
log_file = self.config["main"]["log_file"]
if log_file == "default":
log_file = config_location() + "log"
@ -704,7 +728,16 @@ class PGCli:
try:
if self.destructive_warning:
destroy = confirm = confirm_destructive_query(
if (
self.destructive_statements_require_transaction
and not self.pgexecute.valid_transaction()
and is_destructive(text, self.destructive_warning)
):
click.secho(
"Destructive statements must be run within a transaction."
)
raise KeyboardInterrupt
destroy = confirm_destructive_query(
text, self.destructive_warning, self.dsn_alias
)
if destroy is False:
@ -733,7 +766,7 @@ class PGCli:
click.secho(str(e), err=True, fg="red")
if handle_closed_connection:
self._handle_server_closed_connection(text)
except (PgCliQuitError, EOFError) as e:
except (PgCliQuitError, EOFError):
raise
except Exception as e:
logger.error("sql: %r, error: %r", text, e)
@ -741,7 +774,9 @@ class PGCli:
click.secho(str(e), err=True, fg="red")
else:
try:
if self.output_file and not text.startswith(("\\o ", "\\? ")):
if self.output_file and not text.startswith(
("\\o ", "\\? ", "\\echo ")
):
try:
with open(self.output_file, "a", encoding="utf-8") as f:
click.echo(text, file=f)
@ -785,6 +820,34 @@ class PGCli:
logger.debug("Search path: %r", self.completer.search_path)
return query
def _check_ongoing_transaction_and_allow_quitting(self):
"""Return whether we can really quit, possibly by asking the
user to confirm so if there is an ongoing transaction.
"""
if not self.pgexecute.valid_transaction():
return True
while 1:
try:
choice = click.prompt(
"A transaction is ongoing. Choose `c` to COMMIT, `r` to ROLLBACK, `a` to abort exit.",
default="a",
)
except click.Abort:
# Print newline if user aborts with `^C`, otherwise
# pgcli's prompt will be printed on the same line
# (just after the confirmation prompt).
click.echo(None, err=False)
choice = "a"
choice = choice.lower()
if choice == "a":
return False # do not quit
if choice == "c":
query = self.execute_command("commit")
return query.successful # quit only if query is successful
if choice == "r":
query = self.execute_command("rollback")
return query.successful # quit only if query is successful
def run_cli(self):
logger = self.logger
@ -807,6 +870,10 @@ class PGCli:
text = self.prompt_app.prompt()
except KeyboardInterrupt:
continue
except EOFError:
if not self._check_ongoing_transaction_and_allow_quitting():
continue
raise
try:
text = self.handle_editor_command(text)
@ -816,7 +883,12 @@ class PGCli:
click.secho(str(e), err=True, fg="red")
continue
self.handle_watch_command(text)
try:
self.handle_watch_command(text)
except PgCliQuitError:
if not self._check_ongoing_transaction_and_allow_quitting():
continue
raise
self.now = dt.datetime.today()
@ -1600,7 +1672,8 @@ def format_output(title, cur, headers, status, settings, explain_mode=False):
first_line = next(formatted)
formatted = itertools.chain([first_line], formatted)
if (
not expanded
not explain_mode
and not expanded
and max_width
and len(strip_ansi(first_line)) > max_width
and headers

View File

@ -14,10 +14,13 @@ preprocessors = ()
def escape_for_sql_statement(value):
if value is None:
return "NULL"
if isinstance(value, bytes):
return f"X'{value.hex()}'"
else:
return "'{}'".format(value)
return "'{}'".format(value)
def adapter(data, headers, table_format=None, **kwargs):
@ -29,7 +32,7 @@ def adapter(data, headers, table_format=None, **kwargs):
else:
table_name = table[1]
else:
table_name = '"DUAL"'
table_name = "DUAL"
if table_format == "sql-insert":
h = '", "'.join(headers)
yield 'INSERT INTO "{}" ("{}") VALUES'.format(table_name, h)

View File

@ -290,7 +290,6 @@ def suggest_special(text):
def suggest_based_on_last_token(token, stmt):
if isinstance(token, str):
token_v = token.lower()
elif isinstance(token, Comparison):
@ -399,7 +398,6 @@ def suggest_based_on_last_token(token, stmt):
elif (token_v.endswith("join") and token.is_keyword) or (
token_v in ("copy", "from", "update", "into", "describe", "truncate")
):
schema = stmt.get_identifier_schema()
tables = extract_tables(stmt.text_before_cursor)
is_join = token_v.endswith("join") and token.is_keyword
@ -436,7 +434,6 @@ def suggest_based_on_last_token(token, stmt):
try:
prev = stmt.get_previous_token(token).value.lower()
if prev in ("drop", "alter", "create", "create or replace"):
# Suggest functions from either the currently-selected schema or the
# public schema if no schema has been specified
suggest = []

View File

@ -9,6 +9,10 @@ smart_completion = True
# visible.)
wider_completion_menu = False
# Do not create new connections for refreshing completions; Equivalent to
# always running with the --single-connection flag.
always_use_single_connection = False
# Multi-line mode allows breaking up the sql statements into multiple lines. If
# this is set to True, then the end of the statements must have a semi-colon.
# If this is set to False then sql statements can't be split into multiple
@ -31,10 +35,14 @@ destructive_warning = drop, shutdown, delete, truncate, alter, update, unconditi
# Destructive warning can restart the connection if this is enabled and the
# user declines. This means that any current uncommitted transaction can be
# aborted if the user doesn't want to proceed with a destructive_warning
# aborted if the user doesn't want to proceed with a destructive_warning
# statement.
destructive_warning_restarts_connection = False
# When this option is on (and if `destructive_warning` is not empty),
# destructive statements are not executed when outside of a transaction.
destructive_statements_require_transaction = False
# Enables expand mode, which is similar to `\x` in psql.
expand = False
@ -99,9 +107,10 @@ qualify_columns = if_more_than_one_table
# When no schema is entered, only suggest objects in search_path
search_path_filter = False
# Default pager.
# By default 'PAGER' environment variable is used
# pager = less -SRXF
# Default pager. See https://www.pgcli.com/pager for more information on settings.
# By default 'PAGER' environment variable is used. If the pager is less, and the 'LESS'
# environment variable is not set, then LESS='-SRXF' will be automatically set.
# pager = less
# Timing of sql statements and table rendering.
timing = True

View File

@ -183,7 +183,6 @@ class PGCompleter(Completer):
self.all_completions.update(additional_keywords)
def extend_schemata(self, schemata):
# schemata is a list of schema names
schemata = self.escaped_names(schemata)
metadata = self.dbmetadata["tables"]
@ -252,7 +251,6 @@ class PGCompleter(Completer):
self.all_completions.add(colname)
def extend_functions(self, func_data):
# func_data is a list of function metadata namedtuples
# dbmetadata['schema_name']['functions']['function_name'] should return
@ -286,7 +284,6 @@ class PGCompleter(Completer):
}
def extend_foreignkeys(self, fk_data):
# fk_data is a list of ForeignKey namedtuples, with fields
# parentschema, childschema, parenttable, childtable,
# parentcolumns, childcolumns
@ -309,7 +306,6 @@ class PGCompleter(Completer):
parcolmeta.foreignkeys.append(fk)
def extend_datatypes(self, type_data):
# dbmetadata['datatypes'][schema_name][type_name] should store type
# metadata, such as composite type field names. Currently, we're not
# storing any metadata beyond typename, so just store None
@ -723,7 +719,6 @@ class PGCompleter(Completer):
return self.find_matches(word_before_cursor, conds, meta="join")
def get_function_matches(self, suggestion, word_before_cursor, alias=False):
if suggestion.usage == "from":
# Only suggest functions allowed in FROM clause

View File

@ -76,7 +76,6 @@ class ProtocolSafeCursor(psycopg.Cursor):
class PGExecute:
# The boolean argument to the current_schemas function indicates whether
# implicit schemas, e.g. pg_catalog
search_path_query = """
@ -180,7 +179,6 @@ class PGExecute:
dsn=None,
**kwargs,
):
conn_params = self._conn_params.copy()
new_params = {
@ -203,7 +201,11 @@ class PGExecute:
conn_params.update({k: v for k, v in new_params.items() if v})
conn_info = make_conninfo(**conn_params)
if "dsn" in conn_params:
other_params = {k: v for k, v in conn_params.items() if k != "dsn"}
conn_info = make_conninfo(conn_params["dsn"], **other_params)
else:
conn_info = make_conninfo(**conn_params)
conn = psycopg.connect(conn_info)
conn.cursor_factory = ProtocolSafeCursor

View File

@ -1,18 +1,14 @@
from pkg_resources import packaging
import prompt_toolkit
from prompt_toolkit.key_binding.vi_state import InputMode
from prompt_toolkit.application import get_app
parse_version = packaging.version.parse
vi_modes = {
InputMode.INSERT: "I",
InputMode.NAVIGATION: "N",
InputMode.REPLACE: "R",
InputMode.INSERT_MULTIPLE: "M",
}
if parse_version(prompt_toolkit.__version__) >= parse_version("3.0.6"):
# REPLACE_SINGLE is available in prompt_toolkit >= 3.0.6
if "REPLACE_SINGLE" in {e.name for e in InputMode}:
vi_modes[InputMode.REPLACE_SINGLE] = "R"

View File

@ -146,7 +146,7 @@ class Visualizer:
elif self.explain.get("Max Rows") < plan["Actual Rows"]:
self.explain["Max Rows"] = plan["Actual Rows"]
if not self.explain.get("MaxCost"):
if not self.explain.get("Max Cost"):
self.explain["Max Cost"] = plan["Actual Cost"]
elif self.explain.get("Max Cost") < plan["Actual Cost"]:
self.explain["Max Cost"] = plan["Actual Cost"]
@ -171,7 +171,7 @@ class Visualizer:
return self.warning_format("%.2f ms" % value)
elif value < 60000:
return self.critical_format(
"%.2f s" % (value / 2000.0),
"%.2f s" % (value / 1000.0),
)
else:
return self.critical_format(

View File

@ -23,6 +23,23 @@ Feature: run the cli,
When we send "ctrl + d"
then dbcli exits
Scenario: confirm exit when a transaction is ongoing
When we begin transaction
and we try to send "ctrl + d"
then we see ongoing transaction message
when we send "c"
then dbcli exits
Scenario: cancel exit when a transaction is ongoing
When we begin transaction
and we try to send "ctrl + d"
then we see ongoing transaction message
when we send "a"
then we see dbcli prompt
when we rollback transaction
when we send "ctrl + d"
then dbcli exits
Scenario: interrupt current query via "ctrl + c"
When we send sleep query
and we send "ctrl + c"

View File

@ -64,13 +64,22 @@ def step_ctrl_d(context):
"""
Send Ctrl + D to hopefully exit.
"""
step_try_to_ctrl_d(context)
context.cli.expect(pexpect.EOF, timeout=5)
context.exit_sent = True
@when('we try to send "ctrl + d"')
def step_try_to_ctrl_d(context):
"""
Send Ctrl + D, perhaps exiting, perhaps not (if a transaction is
ongoing).
"""
# turn off pager before exiting
context.cli.sendcontrol("c")
context.cli.sendline(r"\pset pager off")
wrappers.wait_prompt(context)
context.cli.sendcontrol("d")
context.cli.expect(pexpect.EOF, timeout=5)
context.exit_sent = True
@when('we send "ctrl + c"')
@ -87,6 +96,14 @@ def step_see_cancelled_query_warning(context):
wrappers.expect_exact(context, "cancelled query", timeout=2)
@then("we see ongoing transaction message")
def step_see_ongoing_transaction_error(context):
"""
Make sure we receive the warning that a transaction is ongoing.
"""
context.cli.expect("A transaction is ongoing.", timeout=2)
@when("we send sleep query")
def step_send_sleep_15_seconds(context):
"""
@ -199,3 +216,16 @@ def step_resppond_to_destructive_command(context, response):
def step_send_password(context):
wrappers.expect_exact(context, "Password for", timeout=5)
context.cli.sendline(context.conf["pass"] or "DOES NOT MATTER")
@when('we send "{text}"')
def step_send_text(context, text):
context.cli.sendline(text)
# Try to detect whether we are exiting. If so, set `exit_sent`
# so that `after_scenario` correctly cleans up.
try:
context.cli.expect(pexpect.EOF, timeout=0.2)
except pexpect.TIMEOUT:
pass
else:
context.exit_sent = True

View File

@ -3,10 +3,7 @@ import pexpect
from pgcli.main import COLOR_CODE_REGEX
import textwrap
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
from io import StringIO
def expect_exact(context, expected, timeout):

View File

@ -34,7 +34,7 @@ def test_output_sql_insert():
"Jackson",
"jackson_test@gmail.com",
"132454789",
"",
None,
"2022-09-09 19:44:32.712343+08",
"2022-09-09 19:44:32.712343+08",
]
@ -58,7 +58,7 @@ def test_output_sql_insert():
output_list = [l for l in output]
expected = [
'INSERT INTO "user" ("id", "name", "email", "phone", "description", "created_at", "updated_at") VALUES',
" ('1', 'Jackson', 'jackson_test@gmail.com', '132454789', '', "
" ('1', 'Jackson', 'jackson_test@gmail.com', '132454789', NULL, "
+ "'2022-09-09 19:44:32.712343+08', '2022-09-09 19:44:32.712343+08')",
";",
]

View File

@ -216,7 +216,6 @@ def pset_pager_mocks():
with mock.patch("pgcli.main.click.echo") as mock_echo, mock.patch(
"pgcli.main.click.echo_via_pager"
) as mock_echo_via_pager, mock.patch.object(cli, "prompt_app") as mock_app:
yield cli, mock_echo, mock_echo_via_pager, mock_app
@ -297,6 +296,22 @@ def test_i_works(tmpdir, executor):
run(executor, statement, pgspecial=cli.pgspecial)
@dbtest
def test_echo_works(executor):
cli = PGCli(pgexecute=executor)
statement = r"\echo asdf"
result = run(executor, statement, pgspecial=cli.pgspecial)
assert result == ["asdf"]
@dbtest
def test_qecho_works(executor):
cli = PGCli(pgexecute=executor)
statement = r"\qecho asdf"
result = run(executor, statement, pgspecial=cli.pgspecial)
assert result == ["asdf"]
@dbtest
def test_watch_works(executor):
cli = PGCli(pgexecute=executor)
@ -371,7 +386,6 @@ def test_quoted_db_uri(tmpdir):
def test_pg_service_file(tmpdir):
with mock.patch.object(PGCli, "connect") as mock_connect:
cli = PGCli(pgclirc_file=str(tmpdir.join("rcfile")))
with open(tmpdir.join(".pg_service.conf").strpath, "w") as service_conf: