74737: spanconfigsqltranslator: introduce a pts table reader r=adityamaru a=adityamaru

This change introduces a `ProtectedTimestampTableReader`
that provides a txn-scoped, in-memory view of the system
table that stores protected timestamp records.

The `SQLTranslator` will use this table reader to generate
SpanConfigs and SystemSpanConfigs in a follow-up PR.

Informs: #73727

Release note: None
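
As a rough sketch of the shape such a reader can take (the type names, fields, and column list below are hypothetical, not the PR's actual API), the reader scans the records table once inside the caller's transaction and answers all later lookups from memory:

```go
// Package ptsreader: a hypothetical sketch, not the PR's actual API.
package ptsreader

import (
	"context"
	"database/sql"
)

// ptsRecord holds one protected timestamp record. The field names and the
// column list below are illustrative; the real system table has more columns.
type ptsRecord struct {
	id string // record UUID
	ts string // decimal-encoded HLC timestamp the record protects
}

// ptsTableReader serves lookups from an in-memory snapshot taken inside a
// single transaction, so its view stays consistent with the txn's other reads.
type ptsTableReader struct {
	records []ptsRecord
}

// newPTSTableReader scans the records table exactly once inside txn.
func newPTSTableReader(ctx context.Context, txn *sql.Tx) (*ptsTableReader, error) {
	rows, err := txn.QueryContext(ctx, `SELECT id, ts FROM system.protected_ts_records`)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	r := &ptsTableReader{}
	for rows.Next() {
		var rec ptsRecord
		if err := rows.Scan(&rec.id, &rec.ts); err != nil {
			return nil, err
		}
		r.records = append(r.records, rec)
	}
	return r, rows.Err()
}
```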

75125: *: replace "testdata" with sed scripts r=rickystewart a=otan

Not a proper audit, but 80% of the way there.
Refs: #71928

See individual commits for details
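
The diffs below all follow one mechanical pattern: hard-coded "testdata" paths become calls to `testutils.TestDataPath`. A minimal sketch of what such a helper can look like, assuming it simply joins path elements (the real helper may also resolve data files under sandboxed build systems such as Bazel):

```go
package testutils

import (
	"path/filepath"
	"testing"
)

// TestDataPath returns a path under the calling test's testdata directory.
// Minimal sketch: it only joins the given elements onto "testdata"; the
// real helper may do more, e.g. locate runfiles in a Bazel sandbox.
func TestDataPath(t testing.TB, relative ...string) string {
	t.Helper()
	return filepath.Join(append([]string{"testdata"}, relative...)...)
}
```

With this shape, `testutils.TestDataPath(t, "avro", "simple-schema.json")` replaces `filepath.Join("testdata", "avro", "simple-schema.json")`, and `testutils.TestDataPath(t)` alone replaces a bare `"testdata"`.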

75161: randgen: do not create computed columns with bad volatilities r=mgartner a=mgartner

Some casts to STRING types have been given an incorrect volatility. For
example, REGCLASS->STRING casts are immutable when they should be stable
(see #74286 and #74553 for more details).

Creating computed column expressions with such a cast can cause logical
correctness bugs and internal errors. The volatilities cannot be fixed
without breaking backward compatibility. This commit prevents `randgen`
from creating computed columns with these casts so that sqlsmith and TLP
do not repeatedly find these known volatility bugs.

Informs #74727

Release note: None
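
One way to express this kind of guard, sketched with hypothetical names (randgen's actual implementation differs), is a blocklist of cast type pairs consulted before placing a cast in a generated computed-column expression:

```go
package randgen

// castKey identifies a cast by its source and target type names. All names
// in this sketch are illustrative, not randgen's actual API.
type castKey struct {
	from, to string
}

// knownBadVolatility lists casts whose declared volatility is wrong, e.g.
// REGCLASS->STRING is declared immutable but actually behaves as stable.
var knownBadVolatility = map[castKey]bool{
	{from: "regclass", to: "string"}: true,
}

// allowedInComputedColumn reports whether a generated computed-column
// expression may contain the given cast.
func allowedInComputedColumn(from, to string) bool {
	return !knownBadVolatility[castKey{from: from, to: to}]
}
```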

75165: kv: fix GenerateForcedRetryableError to return a bumped epoch r=lidorcarmel a=lidorcarmel

This is needed for PR #74563, where we change how the txn is reset.
Without this change, GenerateForcedRetryableError returns an
error with an inner txn that has epoch 0. With this change,
the epoch is copied from the original txn.

Release note: None
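
The gist of the fix, sketched with stand-in types rather than the actual kv definitions:

```go
package kvsketch

// Transaction and retryableError are illustrative stand-ins for the real
// kv types; only the epoch-handling logic matters here.
type Transaction struct {
	ID    string
	Epoch int32
}

type retryableError struct {
	msg string
	txn Transaction // the txn state the client should retry with
}

func (e *retryableError) Error() string { return e.msg }

// generateForcedRetryableError builds a retryable error carrying the txn
// the client will continue with. Before the fix the inner txn was built
// fresh, leaving Epoch at its zero value; the fix copies the epoch from
// the original txn so the retry resumes at the correct epoch.
func generateForcedRetryableError(cur *Transaction, msg string) error {
	nextTxn := Transaction{
		ID:    cur.ID,
		Epoch: cur.Epoch, // previously implicitly 0
	}
	return &retryableError{msg: msg, txn: nextTxn}
}
```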

Co-authored-by: Aditya Maru <adityamaru@gmail.com>
Co-authored-by: Oliver Tan <otan@cockroachlabs.com>
Co-authored-by: Marcus Gartner <marcus@cockroachlabs.com>
Co-authored-by: Lidor Carmel <lidor@cockroachlabs.com>
5 people committed Jan 20, 2022
5 parents 46bdd4e + 67170ed + 58bb285 + 8b5f53c + 304c548 commit 506412f
Showing 147 changed files with 563 additions and 401 deletions.
2 changes: 1 addition & 1 deletion pkg/ccl/backupccl/backup_test.go
@@ -271,7 +271,7 @@ func TestBackupRestoreDataDriven(t *testing.T) {
defer httpServerCleanup()

ctx := context.Background()
datadriven.Walk(t, "testdata/backup-restore/", func(t *testing.T, path string) {
datadriven.Walk(t, testutils.TestDataPath(t, "backup-restore", ""), func(t *testing.T, path string) {
var lastCreatedServer string
ds := newDatadrivenTestState()
defer ds.cleanup(ctx)
4 changes: 2 additions & 2 deletions pkg/ccl/backupccl/restore_mid_schema_change_test.go
@@ -58,8 +58,8 @@ func TestRestoreMidSchemaChange(t *testing.T) {

skip.UnderRaceWithIssue(t, 56584)

-const (
-testdataBase = "testdata/restore_mid_schema_change"
+var (
+testdataBase = testutils.TestDataPath(t, "restore_mid_schema_change")
exportDirs = testdataBase + "/exports"
)
for _, isClusterRestore := range []bool{true, false} {
5 changes: 3 additions & 2 deletions pkg/ccl/backupccl/restore_old_sequences_test.go
@@ -15,6 +15,7 @@ import (
"testing"

"github.com/cockroachdb/cockroach/pkg/base"
"github.com/cockroachdb/cockroach/pkg/testutils"
"github.com/cockroachdb/cockroach/pkg/util/leaktest"
"github.com/cockroachdb/cockroach/pkg/util/log"
"github.com/stretchr/testify/require"
@@ -41,8 +42,8 @@ import (
func TestRestoreOldSequences(t *testing.T) {
defer leaktest.AfterTest(t)()
defer log.Scope(t).Close(t)
-const (
-testdataBase = "testdata/restore_old_sequences"
+var (
+testdataBase = testutils.TestDataPath(t, "restore_old_sequences")
exportDirs = testdataBase + "/exports"
)

12 changes: 6 additions & 6 deletions pkg/ccl/backupccl/restore_old_versions_test.go
@@ -65,8 +65,8 @@ import (
func TestRestoreOldVersions(t *testing.T) {
defer leaktest.AfterTest(t)()
defer log.Scope(t).Close(t)
-const (
-testdataBase = "testdata/restore_old_versions"
+testdataBase := testutils.TestDataPath(t, "restore_old_versions")
+var (
exportDirsWithoutInterleave = testdataBase + "/exports-without-interleaved"
exportDirs = testdataBase + "/exports"
fkRevDirs = testdataBase + "/fk-rev-history"
@@ -668,7 +668,7 @@ func TestRestoreOldBackupMissingOfflineIndexes(t *testing.T) {
skip.UnderRace(t, "times out under race cause it starts up two test servers")
ctx := context.Background()

badBackups, err := filepath.Abs("testdata/restore_old_versions/inc_missing_addsst/v20.2.7")
badBackups, err := filepath.Abs(testutils.TestDataPath(t, "restore_old_versions", "inc_missing_addsst", "v20.2.7"))
require.NoError(t, err)
args := base.TestServerArgs{ExternalIODir: badBackups}
backupDirs := make([]string, 9)
@@ -741,10 +741,10 @@ func TestRestoreWithDroppedSchemaCorruption(t *testing.T) {
defer log.Scope(t).Close(t)
ctx := context.Background()

+backupDir := testutils.TestDataPath(t, "restore_with_dropped_schema", "exports", "v20.2.7")
const (
-dbName = "foo"
-backupDir = "testdata/restore_with_dropped_schema/exports/v20.2.7"
-fromDir = "nodelocal://0/"
+dbName = "foo"
+fromDir = "nodelocal://0/"
)

args := base.TestServerArgs{ExternalIODir: backupDir}
4 changes: 2 additions & 2 deletions pkg/ccl/backupccl/show_test.go
@@ -555,8 +555,8 @@ func TestShowUpgradedForeignKeys(t *testing.T) {
defer leaktest.AfterTest(t)()
defer log.Scope(t).Close(t)

-const (
-testdataBase = "testdata/restore_old_versions"
+var (
+testdataBase = testutils.TestDataPath(t, "restore_old_versions")
fkRevDirs = testdataBase + "/fk-rev-history"
)

7 changes: 4 additions & 3 deletions pkg/ccl/importccl/csv_testdata_helpers_test.go
@@ -18,6 +18,7 @@ import (
"strings"
"testing"

"github.com/cockroachdb/cockroach/pkg/testutils"
"github.com/cockroachdb/cockroach/pkg/util"
"github.com/cockroachdb/cockroach/pkg/util/envutil"
"github.com/cockroachdb/errors"
@@ -39,7 +40,7 @@ func makeShadowKeyTestFile(t testing.TB, numRowsImportedBefore int, suffix strin
t.Fatal(errors.Errorf("table has no existing rows to shadow"))
}
padding := 10
dir := filepath.Join("testdata", "csv")
dir := testutils.TestDataPath(t, "csv")
fileName := filepath.Join(dir, fmt.Sprintf("shadow-data%s", suffix))
f, err := os.Create(fileName)
if err != nil {
@@ -70,7 +71,7 @@ func makeShadowKeyTestFile(t testing.TB, numRowsImportedBefore int, suffix strin
}

func makeDupWithSameValueFile(t testing.TB, suffix string) {
-dir := filepath.Join("testdata", "csv")
+dir := testutils.TestDataPath(t, "csv")
fileName := filepath.Join(dir, fmt.Sprintf("dup-key-same-value%s", suffix))
f, err := os.Create(fileName)
if err != nil {
@@ -199,7 +200,7 @@ func makeCSVData(
t testing.TB, numFiles, rowsPerFile, numRaceFiles, rowsPerRaceFile int,
) csvTestFiles {
if rewriteCSVTestData {
-dir := filepath.Join("testdata", "csv")
+dir := testutils.TestDataPath(t, "csv")
if err := os.RemoveAll(dir); err != nil {
t.Fatal(err)
}
3 changes: 2 additions & 1 deletion pkg/ccl/importccl/import_processor_test.go
@@ -38,6 +38,7 @@ import (
"github.com/cockroachdb/cockroach/pkg/sql/rowenc"
"github.com/cockroachdb/cockroach/pkg/sql/rowexec"
"github.com/cockroachdb/cockroach/pkg/sql/sem/tree"
"github.com/cockroachdb/cockroach/pkg/testutils"
"github.com/cockroachdb/cockroach/pkg/testutils/serverutils"
"github.com/cockroachdb/cockroach/pkg/testutils/sqlutils"
"github.com/cockroachdb/cockroach/pkg/util/ctxgroup"
@@ -999,7 +1000,7 @@ func avroFormat(t *testing.T, format roachpb.AvroOptions_Format) roachpb.IOFileF

if format != roachpb.AvroOptions_OCF {
// Need to load schema for record specific inputs.
bytes, err := ioutil.ReadFile("testdata/avro/simple-schema.json")
bytes, err := ioutil.ReadFile(testutils.TestDataPath(t, "avro", "simple-schema.json"))
require.NoError(t, err)
avro.SchemaJSON = string(bytes)
avro.RecordSeparator = '\n'
42 changes: 21 additions & 21 deletions pkg/ccl/importccl/import_stmt_test.go
@@ -1564,7 +1564,7 @@ func TestImportRowLimit(t *testing.T) {
defer srv.Close()

ctx := context.Background()
baseDir := filepath.Join("testdata")
baseDir := testutils.TestDataPath(t)
args := base.TestServerArgs{ExternalIODir: baseDir}
tc := testcluster.StartTestCluster(t, 1, base.TestClusterArgs{ServerArgs: args})
defer tc.Stopper().Stop(ctx)
@@ -2492,7 +2492,7 @@ func TestImportFeatureFlag(t *testing.T) {
rowsPerRaceFile := 16

ctx := context.Background()
-baseDir := filepath.Join("testdata", "csv")
+baseDir := testutils.TestDataPath(t, "csv")
tc := testcluster.StartTestCluster(t, nodes, base.TestClusterArgs{ServerArgs: base.TestServerArgs{ExternalIODir: baseDir}})
defer tc.Stopper().Stop(ctx)
sqlDB := sqlutils.MakeSQLRunner(tc.Conns[0])
@@ -2530,7 +2530,7 @@ func TestImportObjectLevelRBAC(t *testing.T) {
const nodes = 3

ctx := context.Background()
-baseDir := filepath.Join("testdata", "pgdump")
+baseDir := testutils.TestDataPath(t, "pgdump")
tc := testcluster.StartTestCluster(t, nodes, base.TestClusterArgs{ServerArgs: base.TestServerArgs{
ExternalIODir: baseDir,
SQLMemoryPoolSize: 256 << 20,
@@ -3850,7 +3850,7 @@ func TestImportDefault(t *testing.T) {
testFiles := makeCSVData(t, numFiles, rowsPerFile, nodes, rowsPerRaceFile)

ctx := context.Background()
-baseDir := filepath.Join("testdata", "csv")
+baseDir := testutils.TestDataPath(t, "csv")
tc := testcluster.StartTestCluster(t, nodes, base.TestClusterArgs{ServerArgs: base.TestServerArgs{ExternalIODir: baseDir}})
defer tc.Stopper().Stop(ctx)
conn := tc.Conns[0]
@@ -4278,7 +4278,7 @@ func TestImportDefaultNextVal(t *testing.T) {
testFiles := makeCSVData(t, numFiles, rowsPerFile, numFiles, rowsPerRaceFile)

ctx := context.Background()
-baseDir := filepath.Join("testdata", "csv")
+baseDir := testutils.TestDataPath(t, "csv")
tc := testcluster.StartTestCluster(t, nodes, base.TestClusterArgs{ServerArgs: base.TestServerArgs{ExternalIODir: baseDir}})
defer tc.Stopper().Stop(ctx)
conn := tc.Conns[0]
@@ -4537,7 +4537,7 @@ func TestImportComputed(t *testing.T) {
const nodes = 3

ctx := context.Background()
-baseDir := filepath.Join("testdata", "csv")
+baseDir := testutils.TestDataPath(t, "csv")
tc := testcluster.StartTestCluster(t, nodes, base.TestClusterArgs{ServerArgs: base.TestServerArgs{ExternalIODir: baseDir}})
defer tc.Stopper().Stop(ctx)
conn := tc.Conns[0]
@@ -5112,7 +5112,7 @@ func TestImportMysql(t *testing.T) {
nodes = 3
)
ctx := context.Background()
baseDir := filepath.Join("testdata")
baseDir := testutils.TestDataPath(t)
args := base.TestServerArgs{ExternalIODir: baseDir}
tc := testcluster.StartTestCluster(t, nodes, base.TestClusterArgs{ServerArgs: args})
defer tc.Stopper().Stop(ctx)
@@ -5241,7 +5241,7 @@ func TestImportIntoMysql(t *testing.T) {
nodes = 3
)
ctx := context.Background()
-baseDir := filepath.Join("testdata")
+baseDir := testutils.TestDataPath(t)
args := base.TestServerArgs{ExternalIODir: baseDir}
tc := testcluster.StartTestCluster(t, nodes, base.TestClusterArgs{ServerArgs: args})
defer tc.Stopper().Stop(ctx)
@@ -5268,7 +5268,7 @@ func TestImportDelimited(t *testing.T) {
nodes = 3
)
ctx := context.Background()
-baseDir := filepath.Join("testdata", "mysqlout")
+baseDir := testutils.TestDataPath(t, "mysqlout")
args := base.TestServerArgs{ExternalIODir: baseDir}
tc := testcluster.StartTestCluster(t, nodes, base.TestClusterArgs{ServerArgs: args})
defer tc.Stopper().Stop(ctx)
@@ -5348,7 +5348,7 @@ func TestImportPgCopy(t *testing.T) {
nodes = 3
)
ctx := context.Background()
-baseDir := filepath.Join("testdata", "pgcopy")
+baseDir := testutils.TestDataPath(t, "pgcopy")
args := base.TestServerArgs{ExternalIODir: baseDir}
tc := testcluster.StartTestCluster(t, nodes, base.TestClusterArgs{ServerArgs: args})
defer tc.Stopper().Stop(ctx)
@@ -5434,7 +5434,7 @@ func TestImportPgDump(t *testing.T) {
nodes = 3
)
ctx := context.Background()
-baseDir := filepath.Join("testdata")
+baseDir := testutils.TestDataPath(t)
args := base.TestServerArgs{ExternalIODir: baseDir}
tc := testcluster.StartTestCluster(t, nodes, base.TestClusterArgs{ServerArgs: args})
defer tc.Stopper().Stop(ctx)
@@ -5803,7 +5803,7 @@ func TestImportPgDumpGeo(t *testing.T) {

const nodes = 1
ctx := context.Background()
-baseDir := filepath.Join("testdata", "pgdump")
+baseDir := testutils.TestDataPath(t, "pgdump")
args := base.TestServerArgs{ExternalIODir: baseDir}

t.Run("geo_shp2pgsql.sql", func(t *testing.T) {
@@ -5878,7 +5878,7 @@ func TestImportPgDumpDropTable(t *testing.T) {
defer log.Scope(t).Close(t)

ctx := context.Background()
-baseDir := filepath.Join("testdata")
+baseDir := testutils.TestDataPath(t)
args := base.TestServerArgs{ExternalIODir: baseDir}
tc := testcluster.StartTestCluster(t, 1, base.TestClusterArgs{ServerArgs: args})
defer tc.Stopper().Stop(ctx)
@@ -5964,7 +5964,7 @@ func TestImportPgDumpSchemas(t *testing.T) {

const nodes = 1
ctx := context.Background()
-baseDir := filepath.Join("testdata", "pgdump")
+baseDir := testutils.TestDataPath(t, "pgdump")
args := base.TestServerArgs{ExternalIODir: baseDir}

// Simple schema test which creates 3 schemas with a single `test` table in
@@ -6171,7 +6171,7 @@ func TestImportCockroachDump(t *testing.T) {
nodes = 3
)
ctx := context.Background()
-baseDir := filepath.Join("testdata")
+baseDir := testutils.TestDataPath(t)
args := base.TestServerArgs{ExternalIODir: baseDir}
tc := testcluster.StartTestCluster(t, nodes, base.TestClusterArgs{ServerArgs: args})
defer tc.Stopper().Stop(ctx)
@@ -6225,7 +6225,7 @@ func TestCreateStatsAfterImport(t *testing.T) {

const nodes = 1
ctx := context.Background()
-baseDir := filepath.Join("testdata")
+baseDir := testutils.TestDataPath(t)
args := base.TestServerArgs{ExternalIODir: baseDir}
tc := testcluster.StartTestCluster(t, nodes, base.TestClusterArgs{ServerArgs: args})
defer tc.Stopper().Stop(ctx)
@@ -6260,7 +6260,7 @@ func TestImportAvro(t *testing.T) {
nodes = 3
)
ctx := context.Background()
-baseDir := filepath.Join("testdata", "avro")
+baseDir := testutils.TestDataPath(t, "avro")
args := base.TestServerArgs{ExternalIODir: baseDir}
tc := testcluster.StartTestCluster(t, nodes, base.TestClusterArgs{ServerArgs: args})
defer tc.Stopper().Stop(ctx)
@@ -6392,7 +6392,7 @@ func TestImportMultiRegion(t *testing.T) {
defer leaktest.AfterTest(t)()
defer log.Scope(t).Close(t)

-baseDir := filepath.Join("testdata")
+baseDir := testutils.TestDataPath(t)
tc, sqlDB, cleanup := multiregionccltestutils.TestingCreateMultiRegionCluster(
t, 2 /* numServers */, base.TestingKnobs{}, multiregionccltestutils.WithBaseDirectory(baseDir),
)
@@ -6885,7 +6885,7 @@ func TestImportInTenant(t *testing.T) {
defer log.Scope(t).Close(t)

ctx := context.Background()
-baseDir := filepath.Join("testdata")
+baseDir := testutils.TestDataPath(t)
args := base.TestServerArgs{ExternalIODir: baseDir}
tc := testcluster.StartTestCluster(t, 1, base.TestClusterArgs{ServerArgs: args})
defer tc.Stopper().Stop(ctx)
@@ -6976,7 +6976,7 @@ func TestDetachedImport(t *testing.T) {
nodes = 3
)
ctx := context.Background()
-baseDir := filepath.Join("testdata", "avro")
+baseDir := testutils.TestDataPath(t, "avro")
args := base.TestServerArgs{ExternalIODir: baseDir}
tc := testcluster.StartTestCluster(t, nodes, base.TestClusterArgs{ServerArgs: args})
defer tc.Stopper().Stop(ctx)
@@ -7077,7 +7077,7 @@ func TestImportJobEventLogging(t *testing.T) {
nodes = 3
)
ctx := context.Background()
-baseDir := filepath.Join("testdata", "avro")
+baseDir := testutils.TestDataPath(t, "avro")
args := base.TestServerArgs{ExternalIODir: baseDir}
args.Knobs = base.TestingKnobs{JobsTestingKnobs: jobs.NewTestingKnobsWithShortIntervals()}
params := base.TestClusterArgs{ServerArgs: args}
13 changes: 7 additions & 6 deletions pkg/ccl/importccl/mysql_testdata_helpers_test.go
@@ -20,6 +20,7 @@ import (
"unicode/utf8"

"github.com/cockroachdb/cockroach/pkg/roachpb"
"github.com/cockroachdb/cockroach/pkg/testutils"
"github.com/cockroachdb/cockroach/pkg/util/envutil"
"github.com/cockroachdb/cockroach/pkg/util/timeutil"
_ "github.com/go-sql-driver/mysql"
@@ -121,10 +122,10 @@ type testFiles struct {
func getMysqldumpTestdata(t *testing.T) testFiles {
var files testFiles

-files.simple = filepath.Join(`testdata`, `mysqldump`, `simple.sql`)
-files.second = filepath.Join(`testdata`, `mysqldump`, `second.sql`)
-files.everything = filepath.Join(`testdata`, `mysqldump`, `everything.sql`)
-files.wholeDB = filepath.Join(`testdata`, `mysqldump`, `db.sql`)
+files.simple = testutils.TestDataPath(t, "mysqldump", "simple.sql")
+files.second = testutils.TestDataPath(t, "mysqldump", "second.sql")
+files.everything = testutils.TestDataPath(t, "mysqldump", "everything.sql")
+files.wholeDB = testutils.TestDataPath(t, "mysqldump", "db.sql")

if rewriteMysqlTestData {
genMysqlTestdata(t, func() {
@@ -190,12 +191,12 @@ func getMysqlOutfileTestdata(t *testing.T) ([]simpleTestRow, []outfileDumpCfg) {
}

for i := range configs {
-configs[i].filename = filepath.Join(`testdata`, `mysqlout`, configs[i].name, `simple.txt`)
+configs[i].filename = testutils.TestDataPath(t, `mysqlout`, configs[i].name, `simple.txt`)
}

if rewriteMysqlTestData {
genMysqlTestdata(t, func() {
-if err := os.RemoveAll(filepath.Join(`testdata`, `mysqlout`)); err != nil {
+if err := os.RemoveAll(testutils.TestDataPath(t, `mysqlout`)); err != nil {
t.Fatal(err)
}
for _, cfg := range configs {
