From b4eb50ab55970e4060b95ed1b01f2777ccceb975 Mon Sep 17 00:00:00 2001 From: scheibling Date: Tue, 8 Apr 2025 19:16:39 +0200 Subject: [PATCH] Created --- .github/workflows/go.yml | 25 + .gitignore | 20 + LICENSE.md | 29 + README.md | 179 +++ archivex/zipx/testdata/a/a.txt | 1024 +++++++++++++++++ archivex/zipx/testdata/b/b.txt | 1 + archivex/zipx/testdata/中国/你好.txt | 2 + archivex/zipx/zip.go | 163 +++ archivex/zipx/zip_test.go | 68 ++ bytex/byte.go | 90 ++ bytex/byte_test.go | 126 ++ cryptox/crc32.go | 7 + cryptox/md5.go | 12 + cryptox/sha1.go | 12 + extractx/number.go | 110 ++ extractx/number_test.go | 59 + filepathx/filepath.go | 290 +++++ filepathx/filepath_test.go | 240 ++++ filepathx/testdata/0.txt | 0 .../testdata/1/1.1/1.1.1/中文_ZH (1).txt | 0 .../testdata/1/1.1/1.1.1/中文_ZH (9).txt | 0 filepathx/testdata/1/1.1/1.1.txt | 0 filepathx/testdata/1/1.1/1.1/中文_ZH (1).txt | 0 filepathx/testdata/2/2.txt | 0 filex/file.go | 40 + filex/file_test.go | 66 ++ fmtx/fmt.go | 68 ++ go.mod | 10 + htmlx/html.go | 186 +++ htmlx/html_test.go | 200 ++++ inx/in.go | 37 + inx/in_test.go | 67 ++ ipx/ip.go | 123 ++ ipx/ip_test.go | 99 ++ isx/is.go | 177 +++ isx/is_test.go | 231 ++++ jsonx/json.go | 157 +++ jsonx/json_test.go | 166 +++ jsonx/parser.go | 246 ++++ jsonx/parser_test.go | 78 ++ keyx/key.go | 70 ++ keyx/key_test.go | 63 + mapx/map.go | 52 + mapx/map_test.go | 60 + net/urlx/url.go | 91 ++ net/urlx/url_test.go | 88 ++ nullx/string.go | 18 + nullx/time.go | 17 + pathx/path.go | 18 + pathx/path_test.go | 25 + randx/rand.go | 46 + randx/rand_test.go | 17 + setx/set.go | 70 ++ setx/set_test.go | 92 ++ slicex/slice.go | 241 ++++ slicex/slice_test.go | 219 ++++ spreedsheetx/column.go | 192 ++++ spreedsheetx/column_test.go | 88 ++ stringx/string.go | 554 +++++++++ stringx/string_test.go | 547 +++++++++ timex/time.go | 123 ++ timex/time_test.go | 217 ++++ type.go | 17 + 63 files changed, 7333 insertions(+) create mode 100644 .github/workflows/go.yml create mode 100644 .gitignore 
create mode 100644 LICENSE.md create mode 100644 README.md create mode 100644 archivex/zipx/testdata/a/a.txt create mode 100644 archivex/zipx/testdata/b/b.txt create mode 100644 archivex/zipx/testdata/中国/你好.txt create mode 100644 archivex/zipx/zip.go create mode 100644 archivex/zipx/zip_test.go create mode 100644 bytex/byte.go create mode 100644 bytex/byte_test.go create mode 100644 cryptox/crc32.go create mode 100644 cryptox/md5.go create mode 100644 cryptox/sha1.go create mode 100644 extractx/number.go create mode 100644 extractx/number_test.go create mode 100644 filepathx/filepath.go create mode 100644 filepathx/filepath_test.go create mode 100644 filepathx/testdata/0.txt create mode 100644 filepathx/testdata/1/1.1/1.1.1/中文_ZH (1).txt create mode 100644 filepathx/testdata/1/1.1/1.1.1/中文_ZH (9).txt create mode 100644 filepathx/testdata/1/1.1/1.1.txt create mode 100644 filepathx/testdata/1/1.1/1.1/中文_ZH (1).txt create mode 100644 filepathx/testdata/2/2.txt create mode 100644 filex/file.go create mode 100644 filex/file_test.go create mode 100644 fmtx/fmt.go create mode 100644 go.mod create mode 100644 htmlx/html.go create mode 100644 htmlx/html_test.go create mode 100644 inx/in.go create mode 100644 inx/in_test.go create mode 100644 ipx/ip.go create mode 100644 ipx/ip_test.go create mode 100644 isx/is.go create mode 100644 isx/is_test.go create mode 100644 jsonx/json.go create mode 100644 jsonx/json_test.go create mode 100644 jsonx/parser.go create mode 100644 jsonx/parser_test.go create mode 100644 keyx/key.go create mode 100644 keyx/key_test.go create mode 100644 mapx/map.go create mode 100644 mapx/map_test.go create mode 100644 net/urlx/url.go create mode 100644 net/urlx/url_test.go create mode 100644 nullx/string.go create mode 100644 nullx/time.go create mode 100644 pathx/path.go create mode 100644 pathx/path_test.go create mode 100644 randx/rand.go create mode 100644 randx/rand_test.go create mode 100644 setx/set.go create mode 100644 setx/set_test.go create 
mode 100644 slicex/slice.go create mode 100644 slicex/slice_test.go create mode 100644 spreedsheetx/column.go create mode 100644 spreedsheetx/column_test.go create mode 100644 stringx/string.go create mode 100644 stringx/string_test.go create mode 100644 timex/time.go create mode 100644 timex/time_test.go create mode 100644 type.go diff --git a/.github/workflows/go.yml b/.github/workflows/go.yml new file mode 100644 index 0000000..4d990cf --- /dev/null +++ b/.github/workflows/go.yml @@ -0,0 +1,25 @@ +name: Go + +on: + push: + branches: [ "main" ] + pull_request: + branches: [ "main" ] + +jobs: + + build: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + + - name: Set up Go + uses: actions/setup-go@v3 + with: + go-version: 1.18 + + - name: Build + run: go build -v ./... + + - name: Test + run: go test -v ./... diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..a2c3d79 --- /dev/null +++ b/.gitignore @@ -0,0 +1,20 @@ +# Binaries for programs and plugins +*.exe +*.exe~ +*.dll +*.so +*.dylib + +# Test binary, built with `go test -c` +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +# *.out + +# Test coverage output +coverage*.* + +# postgres data volume used by postgres server container for testing purpose +testdata/postgres + +.idea/ \ No newline at end of file diff --git a/LICENSE.md b/LICENSE.md new file mode 100644 index 0000000..8c3b89b --- /dev/null +++ b/LICENSE.md @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2022, hiscaler +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. 
Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/README.md b/README.md new file mode 100644 index 0000000..418d0ca --- /dev/null +++ b/README.md @@ -0,0 +1,179 @@ +gox +=== + +Golang functions library + +- archivex + - zipx + - Compress + - UnCompress +- bytex + - IsEmpty + - IsBlank + - ToString + - StartsWith + - EndsWith + - Contains +- cryptox + - Crc32 + - Md5 + - Sha1 +- extractx + - Number + - Numbers + - Float64 + - Float32 + - Int64 + - Int32 + - Int16 + - Int8 + - Int +- filepathx + - Dirs + - Files + - GenerateDirNames + - Ext +- filex + - IsDir + - IsFile + - Exists + - Size +- fmtx + - SprettyPrint + - PrettyPrint + - PrettyPrintln +- htmlx + - Strip + - Spaceless + - Clean + - Tag +- inx + - In + - StringIn + - IntIn +- ipx + - RemoteAddr + - LocalAddr + - IsPrivate + - IsPublic + - Number + - Random + - String +- isx + - Number + - Empty + - Equal + - SafeCharacters + - HttpURL + - OS + - ColorHex +- jsonx + - ToRawMessage + - ToJson + - ToPrettyJson + - EmptyObjectRawMessage + - EmptyArrayRawMessage + - IsEmptyRawMessage + - NewParser + - Exists + - Find + - Interface + - String + - Int + - Int64 + - Float32 + - Float64 + - Bool +- keyx + - Generate +- map + - Keys + - StringMapStringEncode +- net + - urlx + - NewURL + - GetValue + - SetValue + - AddValue + - DelKey + - HasKey + - String + - IsAbsolute + - IsRelative +- nullx + - StringFrom + - NullString + - TimeFrom + - NullTime +- pathx + - FilenameWithoutExt +- randx + - Letter + - Number + - Any +- setx + - ToSet + - ToStringSet + - ToIntSet +- slicex + - Map + - Filter + - ToInterface + - StringToInterface + - IntToInterface + - StringSliceEqual + - IntSliceEqual + - StringSliceReverse + - IntSliceReverse + - Diff + - StringSliceDiff + - IntSliceDiff + - Chunk +- spreedsheetx + - NewColumn() +```go + column := NewColumn("A") + column.Next() // Return `B` if successful + column.RightShift(26) // Return `AB` if successful + column.LeftShift(1) // Return `AA` if successful +``` + + +- stringx + - IsEmpty + - IsBlank + - ToNumber + 
- ContainsChinese + - ToNarrow + - ToWiden + - Split + - String + - RemoveEmoji + - TrimAny + - RemoveExtraSpace + - SequentialWordFields + - ToBytes + - WordMatched + - StartsWith + - EndsWith + - Contains + - QuoteMeta + - HexToByte + - Len + - UpperFirst + - LowerFirst +- timex + - IsAmericaSummerTime + - ChineseTimeLocation + - Between + - DayStart + - DayEnd + - MonthStart + - MonthEnd + - IsAM + - IsPM + - WeekStart + - WeekEnd + - YearWeeksByWeek + - YearWeeksByTime + - XISOWeek \ No newline at end of file diff --git a/archivex/zipx/testdata/a/a.txt b/archivex/zipx/testdata/a/a.txt new file mode 100644 index 0000000..1490980 --- /dev/null +++ b/archivex/zipx/testdata/a/a.txt @@ -0,0 +1,1024 @@ +first line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one 
line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one 
line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one 
line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one 
line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one 
line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +one line +last line \ No newline at end of file diff --git a/archivex/zipx/testdata/b/b.txt b/archivex/zipx/testdata/b/b.txt new file mode 100644 index 0000000..68d06ee --- /dev/null +++ b/archivex/zipx/testdata/b/b.txt @@ -0,0 +1 @@ +b file content \ No newline at end of file diff --git a/archivex/zipx/testdata/中国/你好.txt b/archivex/zipx/testdata/中国/你好.txt new file mode 100644 index 0000000..46840e4 --- /dev/null +++ b/archivex/zipx/testdata/中国/你好.txt @@ -0,0 +1,2 @@ +Hello, China! +你好,中国! 
\ No newline at end of file diff --git a/archivex/zipx/zip.go b/archivex/zipx/zip.go new file mode 100644 index 0000000..87ff819 --- /dev/null +++ b/archivex/zipx/zip.go @@ -0,0 +1,163 @@ +package zipx + +import ( + "archive/zip" + "context" + "io" + "io/fs" + "os" + "path/filepath" + + "git.cloudyne.io/go/hiscaler-gox/filex" + "golang.org/x/sync/errgroup" +) + +type zipFile struct { + header *zip.FileHeader + data *os.File +} + +// Compress compresses files and saved, if compactDirectory is true, then will remove all directory path +func Compress(filename string, files []string, method uint16, compactDirectory bool) error { + zFile, err := os.Create(filename) + if err != nil { + return err + } + + defer zFile.Close() + zipWriter := zip.NewWriter(zFile) + defer zipWriter.Close() + + zipFiles := make([]zipFile, len(files)) + errGrp, ctx := errgroup.WithContext(context.Background()) + for i, file := range files { + f := file + j := i + errGrp.Go(func() error { + select { + case <-ctx.Done(): + return nil + default: + zf, e := addFile(f, method, compactDirectory) + if e != nil { + ctx.Done() + return e + } + zipFiles[j] = zf + return nil + } + }) + } + err = errGrp.Wait() + if err != nil { + return err + } + + for i := range zipFiles { + if zipFiles[i].data == nil { + continue + } + err = func(i int) error { + defer zipFiles[i].data.Close() + if err != nil { + return err // For close all opened files + } + writer, e := zipWriter.CreateHeader(zipFiles[i].header) + if e != nil { + return e + } + + _, e = io.Copy(writer, zipFiles[i].data) + return e + }(i) + } + return err +} + +func addFile(filename string, method uint16, compactDirectory bool) (zipFile zipFile, err error) { + pendingAddFile, err := os.Open(filename) + if err != nil { + return + } + + defer pendingAddFile.Close() + + zipFile.data = pendingAddFile + info, err := pendingAddFile.Stat() + if err != nil { + return + } + + header, err := zip.FileInfoHeader(info) + if err != nil { + return + } + + if 
compactDirectory { + header.Name = filepath.Base(filename) + } else { + header.Name = filename + } + header.Method = method + zipFile.header = header + return +} + +// UnCompress unzip source file to destination directory +func UnCompress(src, dst string) error { + r, err := zip.OpenReader(src) + if err != nil { + return err + } + + defer r.Close() + + // Create destination directory if not exists + if !filex.Exists(dst) { + err = os.MkdirAll(dst, fs.ModePerm) + if err != nil { + return err + } + } + + for _, file := range r.File { + path := filepath.Join(dst, file.Name) + if file.FileInfo().IsDir() { + if err = os.MkdirAll(path, file.Mode()); err != nil { + return err + } + continue + } + + dir := filepath.Dir(path) + if !filex.Exists(dir) { + err = os.MkdirAll(dir, fs.ModePerm) + if err != nil { + return err + } + } + + if err = writeFile(file, path); err != nil { + break + } + } + return err +} + +func writeFile(file *zip.File, path string) error { + fr, err := file.Open() + if err != nil { + + return err + } + + defer fr.Close() + fw, err := os.OpenFile(path, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, file.Mode()) + if err != nil { + return err + } + defer fw.Close() + + _, err = io.Copy(fw, fr) + return err +} diff --git a/archivex/zipx/zip_test.go b/archivex/zipx/zip_test.go new file mode 100644 index 0000000..dea8446 --- /dev/null +++ b/archivex/zipx/zip_test.go @@ -0,0 +1,68 @@ +package zipx + +import ( + "archive/zip" + "fmt" + "path/filepath" + "testing" + + "git.cloudyne.io/go/hiscaler-gox/filex" +) + +var files []string + +func init() { + files = []string{ + "./zip.go", + "./testdata/a/a.txt", + "./testdata/b/b.txt", + "./testdata/中国/你好.txt", + } +} + +func TestCompressCompactDirectory(t *testing.T) { + err := Compress("./a.zip", files, zip.Deflate, true) + if err != nil { + t.Error(err) + } else if !filex.Exists("./a.zip") { + t.Error("zip file not exists") + } +} + +func TestCompressUnCompactDirectory(t *testing.T) { + err := Compress("./a.zip", files, 
zip.Deflate, false) + if err != nil { + t.Error(err) + } else if !filex.Exists("./a.zip") { + t.Error("zip file not exists") + } +} + +func TestCompressError(t *testing.T) { + notExistsFiles := make([]string, 0) + for i := 0; i <= 100; i++ { + notExistsFiles = append(notExistsFiles, fmt.Sprintf("%d-not-exists.file", i)) + } + err := Compress("./a.zip", notExistsFiles, zip.Deflate, true) + if err == nil { + t.Error("err is nil") + } else { + t.Logf("err = %s", err.Error()) + } +} + +func TestUnCompress(t *testing.T) { + TestCompressUnCompactDirectory(t) + err := UnCompress("./a.zip", "./a") + if err != nil { + t.Error(err.Error()) + } else { + for _, file := range files { + checkFile := filepath.Join("./a", file) + if !filex.Exists(checkFile) { + t.Errorf("%s is not exists", checkFile) + break + } + } + } +} diff --git a/bytex/byte.go b/bytex/byte.go new file mode 100644 index 0000000..4dbe4b9 --- /dev/null +++ b/bytex/byte.go @@ -0,0 +1,90 @@ +package bytex + +import ( + "bytes" + "unsafe" +) + +// IsEmpty Check byte is empty +func IsEmpty(b []byte) bool { + return len(b) == 0 +} + +func IsBlank(b []byte) bool { + return len(b) == 0 || len(bytes.TrimSpace(b)) == 0 +} + +func ToString(b []byte) string { + return *(*string)(unsafe.Pointer(&b)) +} + +func StartsWith(s []byte, ss [][]byte, caseSensitive bool) bool { + if ss == nil || len(ss) == 0 { + return true + } + + has := false + if !caseSensitive { + s = bytes.ToLower(s) + } + for _, prefix := range ss { + if len(prefix) == 0 { + has = true + } else { + if !caseSensitive { + prefix = bytes.ToLower(prefix) + } + has = bytes.HasPrefix(s, prefix) + } + if has { + break + } + } + return has +} + +func EndsWith(s []byte, ss [][]byte, caseSensitive bool) bool { + if ss == nil || len(ss) == 0 { + return true + } + + has := false + if !caseSensitive { + s = bytes.ToLower(s) + } + for _, suffix := range ss { + if len(suffix) == 0 { + has = true + } else { + if !caseSensitive { + suffix = bytes.ToLower(suffix) + } + has = 
bytes.HasSuffix(s, suffix) + } + if has { + break + } + } + return has +} + +func Contains(s []byte, ss [][]byte, caseSensitive bool) bool { + in := false + if !caseSensitive { + s = bytes.ToLower(s) + } + for _, substr := range ss { + if len(substr) == 0 { + in = true + } else { + if !caseSensitive { + substr = bytes.ToLower(substr) + } + in = bytes.Contains(s, substr) + } + if in { + break + } + } + return in +} diff --git a/bytex/byte_test.go b/bytex/byte_test.go new file mode 100644 index 0000000..c6ba901 --- /dev/null +++ b/bytex/byte_test.go @@ -0,0 +1,126 @@ +package bytex + +import ( + "github.com/stretchr/testify/assert" + "testing" +) + +func TestIsEmpty(t *testing.T) { + testCases := []struct { + Number int + Byte []byte + Except bool + }{ + {1, []byte("a"), false}, + {2, []byte(""), true}, + {3, []byte(" "), false}, + } + + for _, testCase := range testCases { + isEmpty := IsEmpty(testCase.Byte) + if isEmpty != testCase.Except { + t.Errorf("%d except: %#v, actual: %#v", testCase.Number, testCase.Except, isEmpty) + } + } +} + +func TestIsBlank(t *testing.T) { + testCases := []struct { + Number int + Byte []byte + Except bool + }{ + {1, []byte("a"), false}, + {2, []byte(""), true}, + {3, []byte(" "), true}, + } + + for _, testCase := range testCases { + isBlank := IsBlank(testCase.Byte) + if isBlank != testCase.Except { + t.Errorf("%d except: %#v, actual: %#v", testCase.Number, testCase.Except, isBlank) + } + } +} + +func TestToString(t *testing.T) { + tests := []struct { + tag string + bytesValue []byte + string string + }{ + {"t1", []byte{'a'}, "a"}, + {"t2", []byte("abc"), "abc"}, + {"t3", []byte("a b c "), "a b c "}, + } + for _, test := range tests { + s := ToString(test.bytesValue) + assert.Equal(t, test.string, s, test.tag) + } +} + +func TestStartsWith(t *testing.T) { + tests := []struct { + tag string + string []byte + words [][]byte + caseSensitive bool + except bool + }{ + {"t1", []byte("Hello world!"), [][]byte{[]byte("he"), []byte("He")}, 
false, true}, + {"t2", []byte("Hello world!"), [][]byte{[]byte("he"), []byte("He")}, true, true}, + {"t3", []byte("Hello world!"), [][]byte{[]byte("he")}, true, false}, + {"t4", []byte(""), [][]byte{[]byte("")}, true, true}, + {"t5", []byte(""), nil, true, true}, + {"t6", []byte(""), [][]byte{}, true, true}, + {"t7", []byte("Hello world!"), [][]byte{[]byte("")}, true, true}, + } + for _, test := range tests { + b := StartsWith(test.string, test.words, test.caseSensitive) + assert.Equal(t, test.except, b, test.tag) + } +} + +func TestEndsWith(t *testing.T) { + tests := []struct { + tag string + string []byte + words [][]byte + caseSensitive bool + except bool + }{ + {"t1", []byte("Hello world!"), [][]byte{[]byte("he"), []byte("He")}, false, false}, + {"t2", []byte("Hello world!"), [][]byte{[]byte("he"), []byte("He")}, true, false}, + {"t3", []byte("Hello world!"), [][]byte{[]byte("d!"), []byte("!")}, true, true}, + {"t4", []byte("Hello world!"), [][]byte{[]byte("WORLD!")}, false, true}, + {"t5", []byte(""), [][]byte{[]byte("")}, true, true}, + {"t6", []byte(""), nil, true, true}, + {"t7", []byte(""), [][]byte{}, true, true}, + {"t8", []byte("Hello world!"), [][]byte{[]byte("")}, true, true}, + } + for _, test := range tests { + b := EndsWith(test.string, test.words, test.caseSensitive) + assert.Equal(t, test.except, b, test.tag) + } +} + +func TestContains(t *testing.T) { + tests := []struct { + tag string + string []byte + words [][]byte + caseSensitive bool + except bool + }{ + {"t1", []byte("Hello world!"), [][]byte{[]byte("ol"), []byte("LL")}, false, true}, + {"t2", []byte("Hello world!"), [][]byte{[]byte("ol"), []byte("LL")}, true, false}, + {"t3", []byte("Hello world!"), [][]byte{[]byte("notfound"), []byte("world")}, false, true}, + {"t4", []byte("Hello world!"), [][]byte{[]byte("notfound"), []byte("world")}, true, true}, + {"t5", []byte(""), [][]byte{[]byte("")}, true, true}, + {"t6", []byte("Hello world!"), [][]byte{[]byte("")}, true, true}, + } + for _, 
test := range tests { + b := Contains(test.string, test.words, test.caseSensitive) + assert.Equal(t, test.except, b, test.tag) + } +} diff --git a/cryptox/crc32.go b/cryptox/crc32.go new file mode 100644 index 0000000..eccbce9 --- /dev/null +++ b/cryptox/crc32.go @@ -0,0 +1,7 @@ +package cryptox + +import "hash/crc32" + +func Crc32(s string) uint32 { + return crc32.ChecksumIEEE([]byte(s)) +} diff --git a/cryptox/md5.go b/cryptox/md5.go new file mode 100644 index 0000000..2c47a76 --- /dev/null +++ b/cryptox/md5.go @@ -0,0 +1,12 @@ +package cryptox + +import ( + "crypto/md5" + "encoding/hex" +) + +func Md5(s string) string { + h := md5.New() + h.Write([]byte(s)) + return hex.EncodeToString(h.Sum(nil)) +} diff --git a/cryptox/sha1.go b/cryptox/sha1.go new file mode 100644 index 0000000..582dc98 --- /dev/null +++ b/cryptox/sha1.go @@ -0,0 +1,12 @@ +package cryptox + +import ( + "crypto/sha1" + "encoding/hex" +) + +func Sha1(s string) string { + h := sha1.New() + h.Write([]byte(s)) + return hex.EncodeToString(h.Sum(nil)) +} diff --git a/extractx/number.go b/extractx/number.go new file mode 100644 index 0000000..c4e283e --- /dev/null +++ b/extractx/number.go @@ -0,0 +1,110 @@ +package extractx + +import ( + "regexp" + "strconv" + "strings" +) + +var rxNumber = regexp.MustCompile(`\-?\d+[\d.,]*\d*`) + +// 提取的内容默认为 1,234.56 格式的数字,未实现根据国家标准实现提取 +// https://zhuanlan.zhihu.com/p/157980325 +func clean(s string) string { + s = strings.TrimSpace(s) + if s == "" { + return s + } + s = strings.ReplaceAll(s, ",", "") + n := len(s) + if s[n-1:] == "." 
{ + s = s[n-2 : n-1] + } + return s +} + +func Number(s string) string { + if s == "" { + return "" + } + return clean(rxNumber.FindString(s)) +} + +func Numbers(s string) []string { + if s == "" { + return []string{} + } + + matches := rxNumber.FindAllString(s, -1) + if matches == nil { + return []string{} + } + + for i, v := range matches { + matches[i] = clean(v) + } + return matches +} + +func Float64(s string) float64 { + if s = Number(s); s != "" { + if v, err := strconv.ParseFloat(s, 64); err == nil { + return v + } + } + return 0 +} + +func Float32(s string) float32 { + if s = Number(s); s != "" { + if v, err := strconv.ParseFloat(s, 64); err == nil { + return float32(v) + } + } + return 0 +} + +func Int64(s string) int64 { + if s = Number(s); s != "" { + if v, err := strconv.ParseInt(s, 10, 64); err == nil { + return v + } + } + return 0 +} + +func Int32(s string) int32 { + if s = Number(s); s != "" { + if v, err := strconv.ParseInt(s, 10, 32); err == nil { + return int32(v) + } + } + return 0 +} + +func Int16(s string) int16 { + if s = Number(s); s != "" { + if v, err := strconv.ParseInt(s, 10, 16); err == nil { + return int16(v) + } + } + return 0 +} + +func Int8(s string) int8 { + if s = Number(s); s != "" { + if v, err := strconv.ParseInt(s, 10, 16); err == nil { + return int8(v) + } + } + return 0 +} + +func Int(s string) int { + if s = Number(s); s != "" { + if v, err := strconv.ParseInt(s, 10, 16); err == nil { + return int(v) + } + } + return 0 +} diff --git a/extractx/number_test.go b/extractx/number_test.go new file mode 100644 index 0000000..9b2dc46 --- /dev/null +++ b/extractx/number_test.go @@ -0,0 +1,59 @@ +package extractx + +import ( + "github.com/stretchr/testify/assert" + "testing" +) + +func TestNumber(t *testing.T) { + testCases := []struct { + tag string + string string + expected string + }{ + {"t1", "123", "123"}, + {"t2", "12.3", "12.3"}, + {"t3", "1,234.3", "1234.3"}, + {"t4", " ab 1 123", "1"}, + {"t5", ".", ""}, + {"t6", ",", 
""}, + {"t7", ".,", ""}, + {"t8", "100 23.", "100"}, + {"t8.1", "$100 $23.", "100"}, + {"t9", "-1", "-1"}, + {"t10", "-1-1", "-1"}, // todo maybe is empty + {"t11", "+1", "1"}, + {"t12", "+1+1", "1"}, // todo maybe is empty + {"t13", "1.0 out of 5 stars", "1.0"}, + } + for _, testCase := range testCases { + n := Number(testCase.string) + assert.Equal(t, testCase.expected, n, testCase.tag) + } +} + +func TestNumbers(t *testing.T) { + testCases := []struct { + tag string + string string + expected []string + }{ + {"t1", "123", []string{"123"}}, + {"t2", "12.3", []string{"12.3"}}, + {"t3", "1,234.3", []string{"1234.3"}}, + {"t4", " ab 1 123", []string{"1", "123"}}, + {"t5", " ab .1 123", []string{"1", "123"}}, + {"t5", " ab ,1 123", []string{"1", "123"}}, + {"t6", " ab 1. 123", []string{"1", "123"}}, + {"t7", "$100,$200", []string{"100", "200"}}, + {"t8", "1,2.3,4", []string{"12.34"}}, + {"t9", "1, 2.3, 4", []string{"1", "2.3", "4"}}, + {"t10", "-123,4", []string{"-1234"}}, + {"t11", "1-1", []string{"1", "-1"}}, // todo May be return empty string + {"t12", "N1-1", []string{"1", "-1"}}, // todo May be return empty string + } + for _, testCase := range testCases { + n := Numbers(testCase.string) + assert.Equal(t, testCase.expected, n, testCase.tag) + } +} diff --git a/filepathx/filepath.go b/filepathx/filepath.go new file mode 100644 index 0000000..92b2a1d --- /dev/null +++ b/filepathx/filepath.go @@ -0,0 +1,290 @@ +package filepathx + +import ( + "io/fs" + "mime" + "net/http" + "os" + "path/filepath" + "strings" + + "git.cloudyne.io/go/hiscaler-gox/filex" + "git.cloudyne.io/go/hiscaler-gox/inx" +) + +const ( + searchDir = iota + searchFile +) + +type WalkOption struct { + FilterFunc func(path string) bool // 自定义函数,返回 true 则会加到列表中,否则忽略。当定义该函数时,将会忽略掉 Except, Only 设置 + Except []string // 排除的文件或者目录(仅当 FilterFunc 未设置时起作用) + Only []string // 仅仅符合列表中的文件或者目录才会返回(仅当 FilterFunc 未设置时起作用) + CaseSensitive bool // 区分大小写(作用于 Except 和 Only 设置) + Recursive bool // 是否递归查询下级目录 +} + +func 
read(root string, recursive bool, searchType int) []string { + dfs := os.DirFS(root) + paths := make([]string, 0) + if recursive { + fs.WalkDir(dfs, ".", func(path string, d fs.DirEntry, err error) error { + if err == nil && path != "." && path != ".." && + ((searchType == searchDir && d.IsDir()) || (searchType == searchFile && !d.IsDir())) { + paths = append(paths, filepath.Join(root, path)) + } + return nil + }) + } else { + ds, err := fs.ReadDir(dfs, ".") + if err == nil { + for _, d := range ds { + if d.Name() != "." && d.Name() != ".." && + ((searchType == searchDir && d.IsDir()) || (searchType == searchFile && !d.IsDir())) { + paths = append(paths, filepath.Join(root, d.Name())) + } + } + } + } + + pathPrefix := "" + if strings.HasPrefix(root, "..") { + pathPrefix = ".." + } else if strings.HasPrefix(root, ".") { + pathPrefix = "." + } + if pathPrefix != "" { + pathPrefix += string(filepath.Separator) + for i, path := range paths { + paths[i] = pathPrefix + path + } + } + return paths +} + +func filterPath(path string, opt WalkOption) (ok bool) { + if (opt.FilterFunc == nil && len(opt.Only) == 0 && len(opt.Except) == 0) || + (opt.FilterFunc != nil && opt.FilterFunc(path)) { + return true + } + + if len(opt.Except) > 0 || len(opt.Only) > 0 { + name := filepath.Base(path) + if len(opt.Except) > 0 { + if opt.CaseSensitive { + ok = true + for _, s := range opt.Except { + if s == name { + ok = false + break + } + } + } else { + ok = !inx.StringIn(name, opt.Except...) + } + } + if len(opt.Only) > 0 { + if opt.CaseSensitive { + for _, s := range opt.Only { + if s == name { + ok = true + break + } + } + } else { + ok = inx.StringIn(name, opt.Only...) 
+ } + } + } + return +} + +// Dirs 获取指定目录下的所有目录 +func Dirs(root string, opt WalkOption) []string { + dirs := make([]string, 0) + paths := read(root, opt.Recursive, searchDir) + if len(paths) > 0 { + for _, path := range paths { + if filterPath(path, opt) && !strings.EqualFold(path, root) { + dirs = append(dirs, path) + } + } + } + return dirs +} + +// Files 获取指定目录下的所有文件 +func Files(root string, opt WalkOption) []string { + files := make([]string, 0) + paths := read(root, opt.Recursive, searchFile) + if len(paths) > 0 { + for _, path := range paths { + if filterPath(path, opt) { + files = append(files, path) + } + } + } + return files +} + +// GenerateDirNames 生成目录名 +func GenerateDirNames(s string, n, level int, caseSensitive bool) []string { + if s == "" { + return []string{} + } + + isValidCharFunc := func(r rune) bool { + return 'A' <= r && r <= 'Z' || 'a' <= r && r <= 'z' || '0' <= r && r <= '9' + } + var b strings.Builder + for _, r := range s { + if isValidCharFunc(r) { + b.WriteRune(r) + } + } + if b.Len() == 0 { + return []string{} + } + + s = b.String() // Clean s string + if !caseSensitive { + s = strings.ToLower(s) + } + if n <= 0 { + return []string{s} + } + + if level <= 0 { + level = 1 + } + names := make([]string, 0) + sLen := len(s) + for i := 0; i < sLen; i += n { + if len(names) == level { + break + } + + lastIndex := i + n + if lastIndex >= sLen { + lastIndex = sLen + } + names = append(names, s[i:lastIndex]) + } + return names +} + +// Ext 获取资源扩展名 +func Ext(path string, b []byte) string { + if path == "" && b == nil { + return "" + } + + if b == nil && filex.Exists(path) { + if b1, err := os.ReadFile(path); err == nil { + b = b1[:512] + } + } + ext := "" + if b != nil { + contentType := http.DetectContentType(b) + // https://developer.mozilla.org/zh-CN/docs/Web/HTTP/Basics_of_HTTP/MIME_types/Common_types + extTypes := map[string][]string{ + ".aac": {"audio/aac"}, + ".abw": {"application/x-abiword"}, + ".arc": {"application/x-freearc"}, + ".avi": 
{"video/x-msvideo"}, + ".azw": {"application/vnd.amazon.ebook"}, + // ".bin": {"application/octet-stream"}, + ".bmp": {"image/bmp"}, + ".bz": {"application/x-bzip"}, + ".bz2": {"application/x-bzip2"}, + ".csh": {"application/x-csh"}, + ".css": {"text/css"}, + ".csv": {"text/csv"}, + ".doc": {"application/msword"}, + ".docx": {"application/vnd.openxmlformats-officedocument.wordprocessingml.document"}, + ".eot": {"application/vnd.ms-fontobject"}, + ".epub": {"application/epub+zip"}, + ".gif": {"image/gif"}, + ".htm": {"text/html"}, + ".html": {"text/html"}, + ".ico": {"image/vnd.microsoft.icon"}, + ".ics": {"text/calendar"}, + ".jar": {"application/java-archive"}, + ".jpg": {"image/jpeg"}, + ".jpeg": {"image/jpeg"}, + ".js": {"text/javascript"}, + ".json": {"application/json"}, + ".jsonld": {"application/ld+json"}, + ".mid": {"audio/midi", "audio/x-midi"}, + ".midi": {"audio/midi", "audio/x-midi"}, + ".mjs": {"text/javascript"}, + ".mp3": {"audio/mpeg"}, + ".mpeg": {"video/mpeg"}, + ".mpkg": {"application/vnd.apple.installer+xml"}, + ".odp": {"application/vnd.oasis.opendocument.presentation"}, + ".ods": {"application/vnd.oasis.opendocument.spreadsheet"}, + ".odt": {"application/vnd.oasis.opendocument.text"}, + ".oga": {"audio/ogg"}, + ".ogv": {"video/ogg"}, + ".ogx": {"application/ogg"}, + ".otf": {"font/otf"}, + ".png": {"image/png"}, + ".pdf": {"application/pdf"}, + ".ppt": {"application/vnd.ms-powerpoint"}, + ".pptx": {"application/vnd.openxmlformats-officedocument.presentationml.presentation"}, + ".rar": {"application/x-rar-compressed"}, + ".rtf": {"application/rtf"}, + ".sh": {"application/x-sh"}, + ".svg": {"image/svg+xml"}, + ".swf": {"application/x-shockwave-flash"}, + ".tar": {"application/x-tar"}, + ".tif": {"image/tiff"}, + ".tiff": {"image/tiff"}, + ".ttf": {"font/ttf"}, + ".txt": {"text/plain"}, + ".vsd": {"application/vnd.visio"}, + ".wav": {"audio/wav"}, + ".weba": {"audio/webm"}, + ".webm": {"video/webm"}, + ".webp": {"image/webp"}, + ".woff": 
{"font/woff"}, + ".woff2": {"font/woff2"}, + ".xhtml": {"application/xhtml+xml"}, + ".xls": {"application/vnd.ms-excel"}, + ".xlsx": {"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"}, + ".xml": {"application/xml", "text/xml"}, + ".xul": {"application/vnd.mozilla.xul+xml"}, + ".zip": {"application/zip"}, + ".3gp": {"video/3gpp", "audio/3gpp"}, + ".3g2": {"video/3gpp2", "audio/3gpp2"}, + ".7z": {"application/x-7z-compressed"}, + } + for k, types := range extTypes { + for _, v := range types { + mime.AddExtensionType(k, v) + } + } + if extensions, err := mime.ExtensionsByType(contentType); err == nil && extensions != nil { + n := len(extensions) + if n == 1 { + ext = extensions[0] + } else { + typeExt := map[string]string{ + "text/plain; charset=utf-8": ".txt", + "image/jpeg": ".jpg", + } + if v, exists := typeExt[contentType]; exists { + ext = v + } else { + ext = extensions[0] + } + } + } + } + if ext == "" { + ext = filepath.Ext(path) + } + return ext +} diff --git a/filepathx/filepath_test.go b/filepathx/filepath_test.go new file mode 100644 index 0000000..a7de2f0 --- /dev/null +++ b/filepathx/filepath_test.go @@ -0,0 +1,240 @@ +package filepathx + +import ( + "os" + "path/filepath" + "testing" + + "git.cloudyne.io/go/hiscaler-gox/slicex" + "github.com/stretchr/testify/assert" +) + +func TestDirs(t *testing.T) { + root, _ := os.Getwd() + testCases := []struct { + Number int + Path string + Option WalkOption + Files []string + }{ + { + 1, + "/a/b", + WalkOption{}, + []string{}, + }, + { + 2, + root, + WalkOption{ + CaseSensitive: false, + FilterFunc: func(path string) bool { + return filepath.Base(path) == "2" + }, + Recursive: true, + }, + []string{"2"}, + }, + { + 3, + root, + WalkOption{ + CaseSensitive: false, + Only: []string{"2"}, + Recursive: true, + }, + []string{"2"}, + }, + { + 4, + root, + WalkOption{ + CaseSensitive: false, + Except: []string{"2"}, + Recursive: true, + }, + []string{"testdata", "1", "1.1", "1.1", "1.1.1"}, + }, + { + 
5, + root, + WalkOption{ + CaseSensitive: false, + Recursive: true, + }, + []string{"testdata", "1", "1.1", "1.1", "2", "1.1.1"}, + }, + { + 6, + root + "/testdata", + WalkOption{ + Recursive: true, + }, + []string{"1", "1.1", "1.1", "2", "1.1.1"}, + }, + { + 7, + root + "/testdata", + WalkOption{ + Recursive: false, + }, + []string{"1", "2"}, + }, + } + for _, testCase := range testCases { + dirs := Dirs(testCase.Path, testCase.Option) + for i, dir := range dirs { + dirs[i] = filepath.Base(dir) + } + if !slicex.StringSliceEqual(dirs, testCase.Files, true, true, true) { + t.Errorf("%d: except %v actual %v", testCase.Number, testCase.Files, dirs) + } + } +} + +func TestFiles(t *testing.T) { + root, _ := os.Getwd() + testCases := []struct { + Number int + Path string + Option WalkOption + Files []string + }{ + { + 1, + "/a/b", + WalkOption{}, + []string{}, + }, + { + 2, + root, + WalkOption{ + CaseSensitive: false, + FilterFunc: func(path string) bool { + return filepath.Base(path) == "2.txt" + }, + Recursive: true, + }, + []string{"2.txt"}, + }, + { + 3, + root, + WalkOption{ + CaseSensitive: false, + Only: []string{"2.txt"}, + Recursive: true, + }, + []string{"2.txt"}, + }, + { + 4, + root, + WalkOption{ + CaseSensitive: false, + Except: []string{"2.txt"}, + Recursive: true, + }, + []string{"filepath.go", "filepath_test.go", "1.1.txt", "中文_ZH (1).txt", "中文_ZH (1).txt", "中文_ZH (9).txt", "0.txt"}, + }, + { + 5, + root, + WalkOption{ + CaseSensitive: false, + Recursive: true, + }, + []string{"filepath.go", "filepath_test.go", "1.1.txt", "2.txt", "中文_ZH (1).txt", "中文_ZH (1).txt", "中文_ZH (9).txt", "0.txt"}, + }, + { + 6, + root + "/testdata", + WalkOption{ + Recursive: true, + }, + []string{"1.1.txt", "2.txt", "中文_ZH (1).txt", "中文_ZH (1).txt", "中文_ZH (9).txt", "0.txt"}, + }, + { + 7, + root + "/testdata", + WalkOption{ + Recursive: false, + }, + []string{"0.txt"}, + }, + { + 8, + root + "/testdata/1/1.1/1.1", + WalkOption{ + Recursive: false, + }, + []string{"中文_ZH 
(1).txt"}, + }, + { + 9, + "./testdata/1/1.1/1.1", + WalkOption{ + Recursive: false, + }, + []string{"中文_ZH (1).txt"}, + }, + } + for _, testCase := range testCases { + files := Files(testCase.Path, testCase.Option) + for i, file := range files { + files[i] = filepath.Base(file) + } + if !slicex.StringSliceEqual(files, testCase.Files, true, true, true) { + t.Errorf("%d: except %v actual %v", testCase.Number, testCase.Files, files) + } + } +} + +func TestGenerateDirNames(t *testing.T) { + tests := []struct { + tag string + string string + n int + level int + caseSensitive bool + dirs []string + }{ + {"t1", "abc", 0, 1, true, []string{"abc"}}, + {"t2", "abc", 1, 1, true, []string{"a"}}, + {"t3", "abc", 1, 2, true, []string{"a", "b"}}, + {"t4", "abc", 1, 3, true, []string{"a", "b", "c"}}, + {"t5", "abc", 2, 1, true, []string{"ab"}}, + {"t6", "abc", 2, 2, true, []string{"ab", "c"}}, + {"t7", " a b c ", 2, 2, true, []string{"ab", "c"}}, + {"t7", " a b cdefghijklmn ", 2, 3, true, []string{"ab", "cd", "ef"}}, + {"t8", " a", 12, 3, true, []string{"a"}}, + {"t9", " a中文$b", 12, 3, true, []string{"ab"}}, + } + for _, test := range tests { + names := GenerateDirNames(test.string, test.n, test.level, test.caseSensitive) + assert.Equal(t, test.dirs, names, test.tag) + } +} + +func TestExt(t *testing.T) { + root, _ := os.Getwd() + tests := []struct { + tag string + path string + b []byte + ext string + }{ + {"t1", "/a/b", nil, ""}, + {"t2", "https://golang.org/doc/gopher/fiveyears.jpg", nil, ".jpg"}, + {"t3", filepath.Join(root, "/testdata/2/2.txt"), nil, ".txt"}, + {"t4", filepath.Join(root, "/testdata/2/1.jpg"), nil, ".jpg"}, + {"t5", filepath.Join(root, "/testdata/2/1.pdf"), nil, ".pdf"}, + {"t6", filepath.Join(root, "/testdata/2/1111.pdf"), nil, ".pdf"}, + {"t7", filepath.Join(root, "/testdata/1.xlsx"), nil, ".xlsx"}, + } + for _, test := range tests { + ext := Ext(test.path, test.b) + assert.Equal(t, test.ext, ext, test.tag) + } +} diff --git a/filepathx/testdata/0.txt 
b/filepathx/testdata/0.txt new file mode 100644 index 0000000..e69de29 diff --git a/filepathx/testdata/1/1.1/1.1.1/中文_ZH (1).txt b/filepathx/testdata/1/1.1/1.1.1/中文_ZH (1).txt new file mode 100644 index 0000000..e69de29 diff --git a/filepathx/testdata/1/1.1/1.1.1/中文_ZH (9).txt b/filepathx/testdata/1/1.1/1.1.1/中文_ZH (9).txt new file mode 100644 index 0000000..e69de29 diff --git a/filepathx/testdata/1/1.1/1.1.txt b/filepathx/testdata/1/1.1/1.1.txt new file mode 100644 index 0000000..e69de29 diff --git a/filepathx/testdata/1/1.1/1.1/中文_ZH (1).txt b/filepathx/testdata/1/1.1/1.1/中文_ZH (1).txt new file mode 100644 index 0000000..e69de29 diff --git a/filepathx/testdata/2/2.txt b/filepathx/testdata/2/2.txt new file mode 100644 index 0000000..e69de29 diff --git a/filex/file.go b/filex/file.go new file mode 100644 index 0000000..beba13e --- /dev/null +++ b/filex/file.go @@ -0,0 +1,40 @@ +package filex + +import ( + "os" +) + +// IsFile Check path is a file +func IsFile(path string) bool { + fi, err := os.Stat(path) + if err != nil { + return false + } + return !fi.IsDir() +} + +// IsDir Check path is directory +func IsDir(path string) bool { + fi, err := os.Stat(path) + if err != nil { + return false + } + return fi.IsDir() +} + +// Exists Check path is exists +func Exists(path string) bool { + _, err := os.Stat(path) + if err == nil || os.IsExist(err) { + return true + } + return false +} + +// Size Return file size +func Size(path string) int64 { + if fi, err := os.Stat(path); err == nil { + return fi.Size() + } + return 0 +} diff --git a/filex/file_test.go b/filex/file_test.go new file mode 100644 index 0000000..0d329e2 --- /dev/null +++ b/filex/file_test.go @@ -0,0 +1,66 @@ +package filex + +import ( + "os" + "testing" +) + +func TestIsDir(t *testing.T) { + root, _ := os.Getwd() + testCases := []struct { + Path string + Except bool + }{ + {"/a/b", false}, + {root, true}, + {root + "/file.go", false}, + {root + "/file", false}, + } + for _, testCase := range testCases { + 
// toJson renders data as indented JSON, falling back to the %#v Go-syntax
// representation when marshalling fails. A non-empty prefix is prepended on
// its own line.
func toJson(prefix string, data interface{}) string {
	var s string
	if b, err := json.MarshalIndent(data, "", " "); err == nil {
		s = string(b)
	} else {
		s = fmt.Sprintf("%#v", data)
	}
	if prefix != "" {
		s = prefix + "\n" + s
	}
	return s
}

// labelFor returns the per-value prefix used by PrettyPrint/PrettyPrintln:
// the user prefix, suffixed with the 1-based index when printing more than
// one value.
func labelFor(prefix string, index int, onlyOne bool) string {
	if onlyOne {
		return prefix
	}
	if prefix == "" {
		return strconv.Itoa(index + 1)
	}
	return fmt.Sprintf("%s %d", prefix, index+1)
}

// SprettyPrint renders each value as indented JSON and joins them with
// newlines.
func SprettyPrint(a ...interface{}) string {
	n := len(a)
	if n == 0 {
		return ""
	}
	// BUG FIX: the original did make([]string, n) then appended, producing
	// n empty leading lines in the joined result.
	values := make([]string, 0, n)
	for _, v := range a {
		values = append(values, toJson("", v))
	}
	return strings.Join(values, "\n")
}

// PrettyPrint prints each value as indented JSON (no trailing newline),
// labeling values with prefix and, when several are given, their index.
func PrettyPrint(prefix string, a ...interface{}) {
	onlyOne := len(a) == 1
	for k, v := range a {
		// BUG FIX: the original computed the per-value label but then
		// passed the raw prefix to toJson, discarding the numbering.
		fmt.Print(toJson(labelFor(prefix, k, onlyOne), v))
	}
}

// PrettyPrintln is PrettyPrint with a trailing newline after each value.
func PrettyPrintln(prefix string, a ...interface{}) {
	onlyOne := len(a) == 1
	for k, v := range a {
		fmt.Println(toJson(labelFor(prefix, k, onlyOne), v))
	}
}
enough memory for the new string. + var builder strings.Builder + builder.Grow(len(html) + utf8.UTFMax) + + in := false // True if we are inside an HTML tag. + start := 0 // The index of the previous start tag character `<` + end := 0 // The index of the previous end tag character `>` + + for i, c := range html { + // If this is the last character and we are not in an HTML tag, save it. + if (i+1) == len(html) && end >= start && c != htmlTagStart && c != htmlTagEnd { + builder.WriteString(html[end:]) + } + + // Keep going if the character is not `<` or `>` + if c != htmlTagStart && c != htmlTagEnd { + continue + } + + if c == htmlTagStart { + // Only update the start if we are not in a tag. + // This make sure we strip out `<
` not just `
` + if !in { + start = i + } + in = true + + // Write the valid string between the close and start of the two tags. + builder.WriteString(html[end:start]) + continue + } + // else c == htmlTagEnd + in = false + end = i + 1 + } + s := builder.String() + if s != "" { + s = strings.TrimSpace(Spaceless(s)) + } + return s +} + +// Spaceless 移除多余的空格 +func Spaceless(html string) string { + html = stringx.RemoveExtraSpace(html) + if html == "" { + return "" + } + + return rxSpaceless.ReplaceAllString(html, "><") +} + +func Clean(html string, cleanMode CleanMode) string { + if html == "" { + return html + } + const n = 5 + modes := [n]bool{} // css, javascript, comment, meta, space, all + for i := 0; i < n; i++ { + if cleanMode&(1< 1 { + sort.Strings(keys) + } + return keys + } + + for _, k := range fnSortedKeys(attributes) { + sb.WriteString(" ") + sb.WriteString(k) + sb.WriteString(`="`) + sb.WriteString(attributes[k]) + sb.WriteString(`"`) + } + + keys := fnSortedKeys(styles) + if len(keys) > 0 { + sb.WriteString(` style="`) + for _, k := range keys { + sb.WriteString(k) + sb.WriteString(":") + sb.WriteString(styles[k]) + sb.WriteString(`;`) + } + sb.WriteString(`"`) + } + sb.WriteString(">") + sb.WriteString(content) + sb.WriteString("") + return sb.String() +} diff --git a/htmlx/html_test.go b/htmlx/html_test.go new file mode 100644 index 0000000..2c4de68 --- /dev/null +++ b/htmlx/html_test.go @@ -0,0 +1,200 @@ +package htmlx + +import ( + "github.com/stretchr/testify/assert" + "testing" +) + +func TestStrip(t *testing.T) { + tests := []struct { + tag string + html string + expected string + }{ + {"t0", "
hello
", "hello"}, + {"t1", ` + +
hello
+ +`, "hello"}, + {"t3", "
hello
", "hello"}, + {"t4", "
hello
", "hello"}, + {"t4", ` + +
hello
`, "hello"}, + {"t5", ` + + + + + + +
hello
`, "hello"}, + {"t6", ` + + + + + + + + + + + + +

Custom flags for your garden are a great way to show your personality to your friends and neighbors. Design and turn it into an eye-catching flag all year round. This will be a beautiful addition to your yard and garden, also a simple sign to show your patriotism on Memorial Day, 4th of July or Veterans Day, Christmas holidays or any holiday of the year. + +

`, "Custom flags for your garden are a great way to show your personality to your friends and neighbors. Design and turn it into an eye-catching flag all year round. This will be a beautiful addition to your yard and garden, also a simple sign to show your patriotism on Memorial Day, 4th of July or Veterans Day, Christmas holidays or any holiday of the year."}, + {"t7", "<div>hello
", "hello"}, + {"t8", "
hello world
", "hello world"}, + } + + for _, test := range tests { + equal := Strip(test.html) + assert.Equal(t, test.expected, equal, test.tag) + } +} + +func BenchmarkStrip(b *testing.B) { + for i := 0; i < b.N; i++ { + Strip(` + + + + + + + + + + + + +

Custom flags for your garden are a great way to show your personality to your friends and neighbors. Design and turn it into an eye-catching flag all year round. This will be a beautiful addition to your yard and garden, also a simple sign to show your patriotism on Memorial Day, 4th of July or Veterans Day, Christmas holidays or any holiday of the year. + +

`) + } +} + +func TestSpaceless(t *testing.T) { + tests := []struct { + tag string + html string + expected string + }{ + {"t0", "
hello
", "
hello
"}, + {"t1", ` + +
hello
+ +`, "
hello
"}, + {"t3", "
hello
", "
hello
"}, + {"t4", "
hello
", "
hello
"}, + {"t4", ` + +
hello
`, ` +
hello
`}, + {"t7", "
hello
", "
hello
"}, + {"t8", ` + + + + + + + + + + + + +

Custom flags for your garden are a great way to show your personality to your friends and neighbors. Design and turn it into an eye-catching flag all year round. This will be a beautiful addition to your yard and garden, also a simple sign to show your patriotism on Memorial Day, 4th of July or Veterans Day, Christmas holidays or any holiday of the year. + +

`, `

Custom flags for your garden are a great way to show your personality to your friends and neighbors. Design and turn it into an eye-catching flag all year round. This will be a beautiful addition to your yard and garden, also a simple sign to show your patriotism on Memorial Day, 4th of July or Veterans Day, Christmas holidays or any holiday of the year.

`}, + } + + for _, test := range tests { + html := Spaceless(test.html) + assert.Equal(t, test.expected, html, test.tag) + } +} + +func TestClean(t *testing.T) { + tests := []struct { + tag string + html string + cleanMode CleanMode + expected string + }{ + {"tcss1", "
hello
", CleanModeCSS, "
hello
"}, + {"tcss2", "
hello
", CleanModeCSS, "
hello
"}, + {"tjavascript1", `
hello
`, CleanModeJavascript, "
hello
"}, + {"tcomment1", `
hello
`, CleanModeComment, "
hello
"}, + {"tcss,javascript,comment", `
hello
`, CleanModeCSS | CleanModeJavascript | CleanModeComment, "
hello
"}, + {"tall1", `
hello
`, CleanModeAll, "
hello
"}, + {"tall2", ` + + + + + + + + + + + + +

Product details: +++ Material: 100% Ceramic +++ Size: 11oz or 15oz +++ Dye Sublimation graphics for exceptional prints. +++ Dishwasher and microwave safe. +++ Image is printed on both sides of mug. +++ Printed in the U.S.A. +++ Shipping info: Shipping time is approximately 5-7 business days. + +

`, CleanModeAll, "

Product details: +++ Material: 100% Ceramic +++ Size: 11oz or 15oz +++ Dye Sublimation graphics for exceptional prints. +++ Dishwasher and microwave safe. +++ Image is printed on both sides of mug. +++ Printed in the U.S.A. +++ Shipping info: Shipping time is approximately 5-7 business days.

"}, + {"tall3", `
1 2
2
`, CleanModeAll, `
1 2
2
`}, + } + + for _, testCase := range tests { + html := Clean(testCase.html, testCase.cleanMode) + assert.Equal(t, testCase.expected, html, testCase.tag) + } +} + +func TestTag(t *testing.T) { + tests := []struct { + tag string + elementTag string + content string + attributes map[string]string + styles map[string]string + expected string + }{ + {"t0", "div", "hello", nil, nil, "
hello
"}, + {"t1", "div", "hello", map[string]string{"id": "name"}, nil, `
hello
`}, + {"t1.1", "div", "hello", map[string]string{"id": "name", "name": "name"}, nil, `
hello
`}, + {"t2", "div", "hello", map[string]string{"id": "name", "data-tag": "123"}, map[string]string{"font-size": "1", "font-weight": "bold"}, `
hello
`}, + } + + for _, test := range tests { + equal := Tag(test.elementTag, test.content, test.attributes, test.styles) + assert.Equal(t, test.expected, equal, test.tag) + } +} + +func BenchmarkTag(b *testing.B) { + for i := 0; i < b.N; i++ { + Tag("div", "hello", map[string]string{"id": "name"}, map[string]string{"font-size": "1"}) + } +} diff --git a/inx/in.go b/inx/in.go new file mode 100644 index 0000000..22c857e --- /dev/null +++ b/inx/in.go @@ -0,0 +1,37 @@ +package inx + +import ( + "strings" +) + +// In Check value in values, return true if in values, otherwise return false. +// Value T is a generic value +func In[T comparable](value T, values []T) bool { + if values == nil || len(values) == 0 { + return false + } + for _, v := range values { + if v == value { + return true + } + } + return false +} + +// StringIn 判断 s 是否在 ss 中(忽略大小写) +func StringIn(s string, ss ...string) bool { + if len(ss) == 0 { + return false + } + for _, s2 := range ss { + if strings.EqualFold(s, s2) { + return true + } + } + return false +} + +// IntIn 判断 i 是否在 ii 中 +func IntIn(i int, ii ...int) bool { + return In(i, ii) +} diff --git a/inx/in_test.go b/inx/in_test.go new file mode 100644 index 0000000..f0d2cec --- /dev/null +++ b/inx/in_test.go @@ -0,0 +1,67 @@ +package inx + +import ( + "github.com/stretchr/testify/assert" + "sort" + "strconv" + "testing" +) + +func TestIn(t *testing.T) { + assert.Equal(t, true, In(1, []int{1, 2, 3, 4}), "int1") + assert.Equal(t, false, In(1, []int{2, 3, 4, 5}), "int2") + assert.Equal(t, false, In(1, nil), "int3") + assert.Equal(t, false, In(1, []int{}), "int4") + assert.Equal(t, true, In(1, []float64{1.0, 2.0, 3.0}), "float1") + assert.Equal(t, false, In(1.1, []float64{1.0, 2.0, 3.0}), "float2") + assert.Equal(t, true, In(true, []bool{true, false, false}), "bool1") + assert.Equal(t, false, In(true, []bool{false, false, false}), "bool2") +} + +func BenchmarkIn(b *testing.B) { + b.StopTimer() + ss := make([]string, 100000) + for i := 0; i < 100000; i++ 
// RemoteAddr returns the client address for r, preferring proxy-forwarded
// headers (X-Forwarded-For, X-Real-IP, X-Appengine-Remote-Addr) over
// r.RemoteAddr. When mustPublic is true, only a public IP taken from the
// headers is accepted. Falls back to r.RemoteAddr (which may still include
// a port) when no header entry qualifies.
func RemoteAddr(r *http.Request, mustPublic bool) string {
	if r == nil {
		return ""
	}

	for _, key := range []string{"X-Forwarded-For", "X-Real-IP", "X-Appengine-Remote-Addr"} {
		value := r.Header.Get(key)
		if value == "" {
			continue
		}
		for _, item := range strings.Split(value, ",") {
			item = strings.TrimSpace(item)
			ip := item
			if strings.ContainsRune(item, ':') {
				// Entries may carry a port ("1.2.3.4:8080"); bare IPv6
				// fails SplitHostPort and is skipped, as before.
				host, _, err := net.SplitHostPort(item)
				if err != nil {
					continue
				}
				ip = host
			}

			if !mustPublic {
				return ip
			}
			// BUG FIX: the original tested `e != nil && v`, so a valid
			// public IP (err == nil) could never be returned here.
			if v, err := IsPublic(ip); err == nil && v {
				return ip
			}
		}
	}
	return r.RemoteAddr
}

// LocalAddr returns a non-loopback, non-private, non-link-local IPv4
// address of this host. If no interface qualifies, it discovers the
// outbound address by dialing well-known DNS servers (UDP dial sends no
// packets). Returns "" when everything fails.
func LocalAddr() string {
	addresses, err := net.InterfaceAddrs()
	if err != nil {
		return ""
	}

	addr := ""
	for _, address := range addresses {
		ipNet, ok := address.(*net.IPNet)
		if !ok || ipNet.IP.IsLoopback() || ipNet.IP.IsPrivate() || ipNet.IP.IsLinkLocalUnicast() {
			continue
		}
		if ipNet.IP.To4() != nil {
			addr = ipNet.IP.String()
			break
		}
	}
	if addr == "" {
		for _, address := range []string{"114.114.114.114:53", "8.8.8.8:53"} {
			conn, err := net.Dial("udp", address)
			if err != nil {
				continue
			}
			// BUG FIX: read LocalAddr before closing — the original closed
			// the connection first and then used it.
			localAddr := conn.LocalAddr().(*net.UDPAddr)
			conn.Close()
			// IP.String() is IPv6-safe, unlike splitting the address on ":".
			addr = localAddr.IP.String()
			break
		}
	}
	return addr
}

// IsPrivate reports whether ip is a loopback, private, or link-local
// unicast address. An unparsable ip yields an error.
func IsPrivate(ip string) (v bool, err error) {
	addr := net.ParseIP(ip)
	if addr == nil {
		err = fmt.Errorf("ipx: %s address is invalid", ip)
	} else {
		v = addr.IsLoopback() || addr.IsPrivate() || addr.IsLinkLocalUnicast()
	}
	return
}

// IsPublic reports whether ip is a valid, non-private address.
func IsPublic(ip string) (v bool, err error) {
	v, err = IsPrivate(ip)
	v = err == nil && !v
	return
}

// Number converts a dotted IPv4 address to its numeric form
// ("1.2.3.4" -> 0x01020304). Non-IPv4 input, including IPv6, is an error.
func Number(ip string) (uint, error) {
	addr := net.ParseIP(ip)
	if addr != nil {
		// BUG FIX: ParseIP returns the 16-byte form for dotted IPv4, so
		// indexing [0..3] directly read the zero IPv4-in-IPv6 prefix and
		// always produced 0. To4 yields the 4-byte form (nil for IPv6).
		addr = addr.To4()
	}
	if addr == nil {
		return 0, fmt.Errorf("ipx: %s is invalid ip", ip)
	}
	return uint(addr[3]) | uint(addr[2])<<8 | uint(addr[1])<<16 | uint(addr[0])<<24, nil
}

// Random returns a random dotted IPv4 address. It uses math/rand and is not
// cryptographically secure.
func Random() string {
	const size = net.IPv4len
	ip := make([]byte, size)
	for i := 0; i < size; i++ {
		ip[i] = byte(rand.Intn(256))
	}
	return net.IP(ip).To4().String()
}

// String converts a numeric IPv4 value back to dotted form; values above
// math.MaxUint32 are an error.
func String(ip uint) (string, error) {
	if ip > math.MaxUint32 {
		return "", fmt.Errorf("ipx: %d is not valid ipv4", ip)
	}

	addr := make(net.IP, net.IPv4len)
	addr[0] = byte(ip >> 24)
	addr[1] = byte(ip >> 16)
	addr[2] = byte(ip >> 8)
	addr[3] = byte(ip)
	return addr.String(), nil
}
+ "X-Forwarded-For": {"127.0.0.1"}, + }, false, "127.0.0.1", + }, + { + "t2", map[string][]string{ + "X-Real-IP": {"127.0.0.1:8080"}, + "X-Forwarded-For": {"127.0.0.1:8080"}, + }, false, "127.0.0.1", + }, + { + "t3", map[string][]string{ + "X-Real-IP": {"127.0.0.1"}, + "X-Forwarded-For": {"127.0.0.1"}, + }, true, "", + }, + { + "t4", map[string][]string{ + "X-Real-IP": {"127.0.0.1:8080"}, + "X-Forwarded-For": {"127.0.0.1:8080"}, + }, true, "", + }, + { + "t5", map[string][]string{ + "X-Real-IP": {"::1"}, + "X-Forwarded-For": {"::1"}, + }, true, "", + }, + } + for _, testCase := range testCases { + request.Header = testCase.headers + addr := RemoteAddr(request, testCase.mustPublic) + assert.Equal(t, testCase.expected, addr, testCase.tag) + } +} + +func TestLocalAddr(t *testing.T) { + ip := LocalAddr() + if ip == "" { + t.Error("LocalAddr() return empty value") + } +} + +func TestIsPrivate(t *testing.T) { + testCases := []struct { + tag string + ip string + expected bool + hasError bool + }{ + {"t1", "127.0.0.1", true, false}, + {"t2", "::1", true, false}, + {"t3", "xxx", false, true}, + } + for _, testCase := range testCases { + v, err := IsPrivate(testCase.ip) + assert.Equal(t, testCase.expected, v, testCase.tag) + assert.Equal(t, testCase.hasError, err != nil, testCase.tag+" error") + } +} + +func TestIsPublic(t *testing.T) { + testCases := []struct { + tag string + ip string + expected bool + hasError bool + }{ + {"t1", "127.0.0.1", false, false}, + {"t2", "::1", false, false}, + {"t3", "xxx", false, true}, + {"t4", "120.228.142.126", true, false}, + } + for _, testCase := range testCases { + v, err := IsPublic(testCase.ip) + assert.Equal(t, testCase.expected, v, testCase.tag) + assert.Equal(t, testCase.hasError, err != nil, testCase.tag+" error") + } +} diff --git a/isx/is.go b/isx/is.go new file mode 100644 index 0000000..e4c6d00 --- /dev/null +++ b/isx/is.go @@ -0,0 +1,177 @@ +package isx + +import ( + "bytes" + "net/url" + "reflect" + "regexp" + "runtime" + 
"strings" + "time" + "unicode/utf8" + + "git.cloudyne.io/go/hiscaler-gox/stringx" +) + +var ( + rxSafeCharacters = regexp.MustCompile("^[a-zA-Z0-9\\.\\-_][a-zA-Z0-9\\.\\-_]*$") + rxNumber = regexp.MustCompile("^[+-]?\\d+$|^\\d+[.]\\d+$") + rxColorHex = regexp.MustCompile("^#([A-Fa-f0-9]{6}|[A-Fa-f0-9]{3})$") +) + +// OS type +const ( + IsAix = "aix" + IsAndroid = "android" + IsDarwin = "darwin" + IsDragonfly = "dragonfly" + IsFreebsd = "freebsd" + IsHurd = "hurd" + IsIllumos = "illumos" + IsIos = "ios" + IsJs = "js" + IsLinux = "linux" + IsNacl = "nacl" + IsNetbsd = "netbsd" + IsOpenbsd = "openbsd" + IsPlan9 = "plan9" + IsSolaris = "solaris" + IsWindows = "windows" + IsZos = "zos" +) + +// Number Check any value is a number +func Number(i interface{}) bool { + switch i.(type) { + case string: + s := stringx.TrimAny(strings.TrimSpace(i.(string)), "+", "-") + n := len(s) + if n == 0 { + return false + } + + if strings.IndexFunc(s[n-1:], func(c rune) bool { + return c < '0' || c > '9' + }) != -1 { + return false + } + return rxNumber.MatchString(strings.ReplaceAll(s, ",", "")) + case int, int8, int16, int32, int64, + uint, uint8, uint16, uint32, uint64, uintptr, + float32, float64, + complex64, complex128: + return true + default: + return false + } +} + +// Empty 判断是否为空 +func Empty(value interface{}) bool { + if value == nil { + return true + } + + v := reflect.ValueOf(value) + switch v.Kind() { + case reflect.String, reflect.Array, reflect.Map, reflect.Slice: + return v.Len() == 0 + case reflect.Bool: + return !v.Bool() + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return v.Int() == 0 + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + return v.Uint() == 0 + case reflect.Float32, reflect.Float64: + return v.Float() == 0 + case reflect.Invalid: + return true + case reflect.Interface, reflect.Ptr: + if v.IsNil() { + return true + } + return Empty(v.Elem().Interface()) + case 
reflect.Struct: + v, ok := value.(time.Time) + if ok && v.IsZero() { + return true + } + } + return false +} + +func Equal(expected interface{}, actual interface{}) bool { + if expected == nil || actual == nil { + return expected == actual + } + + if exp, ok := expected.([]byte); ok { + act, ok := actual.([]byte) + if !ok { + return false + } + + if exp == nil || act == nil { + return true + } + + return bytes.Equal(exp, act) + } + return reflect.DeepEqual(expected, actual) +} + +// SafeCharacters Only include a-zA-Z0-9.-_ +// Reference https://www.quora.com/What-are-valid-file-names +func SafeCharacters(str string) bool { + if str == "" { + return false + } + return rxSafeCharacters.MatchString(str) +} + +// HttpURL checks if the string is a HTTP URL. +// govalidator/IsURL +func HttpURL(str string) bool { + const ( + URLSchema string = `((https?):\/\/)` + URLPath string = `((\/|\?|#)[^\s]*)` + URLPort string = `(:(\d{1,5}))` + URLIP string = `([1-9]\d?|1\d\d|2[01]\d|22[0-3]|24\d|25[0-5])(\.(\d{1,2}|1\d\d|2[0-4]\d|25[0-5])){2}(?:\.([0-9]\d?|1\d\d|2[0-4]\d|25[0-5]))` + URLSubdomain string = `((www\.)|([a-zA-Z0-9]+([-_\.]?[a-zA-Z0-9])*[a-zA-Z0-9]\.[a-zA-Z0-9]+))` + URL = `^` + URLSchema + `?` + `((` + URLIP + `|(\[` + `(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))` + `\])|(([a-zA-Z0-9]([a-zA-Z0-9-_]+)?[a-zA-Z0-9]([-\.][a-zA-Z0-9]+)*)|(` + URLSubdomain + 
`?))?(([a-zA-Z\x{00a1}-\x{ffff}0-9]+-?-?)*[a-zA-Z\x{00a1}-\x{ffff}0-9]+)(?:\.([a-zA-Z\x{00a1}-\x{ffff}]{1,}))?))\.?` + URLPort + `?` + URLPath + `?$` + ) + + if str == "" || utf8.RuneCountInString(str) >= 2083 || len(str) <= 3 || strings.HasPrefix(str, ".") { + return false + } + if strings.HasPrefix(str, "//") { + str = "http:" + str + } + strTemp := str + if strings.Contains(str, ":") && !strings.Contains(str, "://") { + // support no indicated urlscheme but with colon for port number + // http:// is appended so url.Parse will succeed, strTemp used so it does not impact rxURL.MatchString + strTemp = "http://" + str + } + u, err := url.Parse(strTemp) + if err != nil { + return false + } + if strings.HasPrefix(u.Host, ".") { + return false + } + if u.Host == "" && (u.Path != "" && !strings.Contains(u.Path, ".")) { + return false + } + return regexp.MustCompile(URL).MatchString(str) +} + +// OS check typ is a valid OS type +// Usage: isx.OS(isx.IsLinux) +func OS(typ string) bool { + return runtime.GOOS == typ +} + +func ColorHex(s string) bool { + return rxColorHex.MatchString(s) +} diff --git a/isx/is_test.go b/isx/is_test.go new file mode 100644 index 0000000..e0fcd02 --- /dev/null +++ b/isx/is_test.go @@ -0,0 +1,231 @@ +package isx + +import ( + "github.com/stretchr/testify/assert" + "testing" + "time" +) + +func TestNumber(t *testing.T) { + uintPtr := uintptr(12) + testCases := []struct { + Value interface{} + IsNumber bool + }{ + {"a", false}, + {"111", true}, + {"1.23", true}, + {"1,234.5", true}, + {"1234.5,", false}, + {"12345.", false}, + {" 12345.6 ", true}, + {" 12345. 
6 ", false}, + {"-1", true}, + {"+1", true}, + {1, true}, + {1.1, true}, + {0, true}, + {uintPtr, true}, + } + for _, testCase := range testCases { + v := Number(testCase.Value) + if v != testCase.IsNumber { + t.Errorf("%s except %v actual %v", testCase.Value, testCase.IsNumber, v) + } + } +} + +func TestEmpty(t *testing.T) { + var s1 string + var s2 = "a" + var s3 *string + s4 := struct{}{} + time1 := time.Now() + var time2 time.Time + tests := []struct { + tag string + value interface{} + empty bool + }{ + // nil + {"t0", nil, true}, + // string + {"t1.1", "", true}, + {"t1.2", "1", false}, + // slice + {"t2.1", []byte(""), true}, + {"t2.2", []byte("1"), false}, + // map + {"t3.1", map[string]int{}, true}, + {"t3.2", map[string]int{"a": 1}, false}, + // bool + {"t4.1", false, true}, + {"t4.2", true, false}, + // int + {"t5.1", 0, true}, + {"t5.2", int8(0), true}, + {"t5.3", int16(0), true}, + {"t5.4", int32(0), true}, + {"t5.5", int64(0), true}, + {"t5.6", 1, false}, + {"t5.7", int8(1), false}, + {"t5.8", int16(1), false}, + {"t5.9", int32(1), false}, + {"t5.10", int64(1), false}, + // uint + {"t6.1", uint(0), true}, + {"t6.2", uint8(0), true}, + {"t6.3", uint16(0), true}, + {"t6.4", uint32(0), true}, + {"t6.5", uint64(0), true}, + {"t6.6", uint(1), false}, + {"t6.7", uint8(1), false}, + {"t6.8", uint16(1), false}, + {"t6.9", uint32(1), false}, + {"t6.10", uint64(1), false}, + // float + {"t7.1", float32(0), true}, + {"t7.2", float64(0), true}, + {"t7.3", float32(1), false}, + {"t7.4", float64(1), false}, + // interface, ptr + {"t8.1", &s1, true}, + {"t8.2", &s2, false}, + {"t8.3", s3, true}, + // struct + {"t9.1", s4, false}, + {"t9.2", &s4, false}, + // time.Time + {"t10.1", time1, false}, + {"t10.2", &time1, false}, + {"t10.3", time2, true}, + {"t10.4", &time2, true}, + // rune + {"t11.1", 'a', false}, + // byte + {"t12.1", []byte(""), true}, + {"t12.2", []byte(" "), false}, + } + + for _, test := range tests { + empty := Empty(test.value) + assert.Equal(t, 
test.empty, empty, test.tag) + } +} + +func TestIsEqual(t *testing.T) { + s1 := "hello" + s2 := s1 + s3 := "hello" + t1 := time.Now() + t2 := time.Now().AddDate(0, 0, 1) + type1 := []struct { + username string + }{ + {"john"}, + } + type2 := []struct { + username string + }{ + {"john"}, + } + tests := []struct { + tag string + a interface{} + b interface{} + except bool + }{ + {"t0", nil, nil, true}, + {"t1", nil, "", false}, + {"t2", "", "", true}, + {"t3", "", " ", false}, + {"t4", s1, s2, true}, + {"t5", s2, s3, true}, + {"t6", t1, t2, false}, + {"t7", type1, type2, true}, + } + + for _, test := range tests { + equal := Equal(test.a, test.b) + assert.Equal(t, test.except, equal, test.tag) + } +} + +func TestSafeCharacters(t *testing.T) { + type testCast struct { + String string + Safe bool + } + testCasts := []testCast{ + {"", false}, + {" ", false}, + {"a", true}, + {"111", true}, + {"a", false}, + {"A_B", true}, + {"A_中B", false}, + {"a.b-c_", true}, + {"_.a.b-c_", true}, + {`\.a.b-c_`, false}, + } + for _, tc := range testCasts { + safe := SafeCharacters(tc.String) + if safe != tc.Safe { + t.Errorf("%s except %v, actual:%v", tc.String, tc.Safe, safe) + } + } +} + +func BenchmarkSafeCharacters(b *testing.B) { + for i := 0; i < b.N; i++ { + SafeCharacters("_.a.b-c_") + } +} + +func TestHttpURL(t *testing.T) { + tests := []struct { + tag string + url string + except bool + }{ + {"t0", "www.example.com", true}, + {"t1", "http://www.example.com", true}, + {"t2", "https://www.example.com", true}, + {"t3", "https://www.com", true}, + {"t4", "https://a", true}, // is valid URL? 
+ {"t5", "https://127.0.0.1", true}, + {"t6", "https://", false}, + {"t7", "https://a", true}, + {"t8", "", false}, + {"t9", "aaa", false}, + {"t10", "https://www.example.com:8080", true}, + {"t11", "//www.example.com:8080", true}, + {"t12", "//a.b", true}, + } + + for _, test := range tests { + equal := HttpURL(test.url) + assert.Equal(t, test.except, equal, test.tag) + } +} + +func TestColorHex(t *testing.T) { + tests := []struct { + tag string + color string + except bool + }{ + {"t0", "#fff", true}, + {"t1", "#ffffff", true}, + {"t2", "#000000", true}, + {"t3", "#ffff", false}, + {"t4", "ffffff", false}, + {"t5", "#ggg", false}, + {"t6", "#-100000", false}, + } + + for _, test := range tests { + equal := ColorHex(test.color) + assert.Equal(t, test.except, equal, test.tag) + } +} diff --git a/jsonx/json.go b/jsonx/json.go new file mode 100644 index 0000000..2d1b88d --- /dev/null +++ b/jsonx/json.go @@ -0,0 +1,157 @@ +package jsonx + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "reflect" + "strings" +) + +func ToRawMessage(i interface{}, defaultValue string) (json.RawMessage, error) { + m := json.RawMessage{} + var b []byte + var err error + b, err = json.Marshal(&i) + if err != nil { + return m, err + } + + b = bytes.TrimSpace(b) + if len(b) == 0 || bytes.EqualFold(b, []byte("null")) { + b = []byte(defaultValue) + } + err = m.UnmarshalJSON(b) + return m, err +} + +// ToJson Change interface to json string +func ToJson(i interface{}, defaultValue string) string { + if i == nil { + return defaultValue + } + vo := reflect.ValueOf(i) + switch vo.Kind() { + case reflect.Chan, reflect.Func, reflect.Map, reflect.Ptr, reflect.UnsafePointer, reflect.Interface, reflect.Slice: + if vo.IsNil() { + return defaultValue + } + default: + } + + b, err := json.Marshal(i) + if err != nil { + return defaultValue + } + var buf bytes.Buffer + err = json.Compact(&buf, b) + if err != nil { + return defaultValue + } + if json.Valid(buf.Bytes()) { + return buf.String() + } 
+ return defaultValue +} + +func ToPrettyJson(i interface{}) string { + if i == nil { + return "null" + } + vo := reflect.ValueOf(i) + switch vo.Kind() { + case reflect.Chan, reflect.Func, reflect.Map, reflect.Ptr, reflect.UnsafePointer, reflect.Interface, reflect.Slice: + if vo.IsNil() { + return "null" + } + default: + } + + b, err := json.Marshal(i) + if err != nil { + return fmt.Sprintf("%+v", i) + } + var buf bytes.Buffer + err = json.Indent(&buf, b, "", " ") + if err != nil { + return fmt.Sprintf("%+v", i) + } + return buf.String() +} + +// EmptyObjectRawMessage 空对象 +func EmptyObjectRawMessage() json.RawMessage { + v := json.RawMessage{} + _ = v.UnmarshalJSON([]byte("{}")) + + return v +} + +// EmptyArrayRawMessage 空数组 +func EmptyArrayRawMessage() json.RawMessage { + v := json.RawMessage{} + _ = v.UnmarshalJSON([]byte("[]")) + return v +} + +// IsEmptyRawMessage 验证数据是否为空 +func IsEmptyRawMessage(data json.RawMessage) bool { + if data == nil { + return true + } + + b, err := data.MarshalJSON() + if err != nil { + return true + } + + s := string(bytes.TrimSpace(b)) + if s == "" || s == "[]" || s == "{}" || strings.EqualFold(s, "null") { + return true + } + + if strings.Index(s, " ") != -1 { + s = strings.ReplaceAll(s, " ", "") + } + return s == "[]" || s == "{}" +} + +func Convert(from json.RawMessage, to any) error { + if IsEmptyRawMessage(from) { + return nil + } + + var b []byte + b, err := from.MarshalJSON() + if err != nil { + return err + } + + return json.Unmarshal(b, &to) +} + +// Extract 提取字符串中的有效 JSON 数据 +// 比如 `{"a": 1, "b": 2}}}}a` 提取后的数据为 `{"a": 1, "b": 2}` +func Extract(str string) (string, error) { + str = strings.TrimSpace(str) + n := len(str) + if n == 0 { + return "", errors.New("jsonx: empty string") + } + if json.Valid([]byte(str)) { + return str, nil + } + + for i := 0; i < n; i++ { + if str[i] == '{' || str[i] == '[' { + for j := n; j > i; j-- { + substr := str[i:j] + if json.Valid([]byte(substr)) { + return substr, nil + } + } + } + } + 
return "", errors.New("jsonx: not found") +} diff --git a/jsonx/json_test.go b/jsonx/json_test.go new file mode 100644 index 0000000..8923167 --- /dev/null +++ b/jsonx/json_test.go @@ -0,0 +1,166 @@ +package jsonx + +import ( + "encoding/json" + "github.com/stretchr/testify/assert" + "strings" + "testing" +) + +func TestToJson(t *testing.T) { + var names []string + testCases := []struct { + Number int + Value interface{} + DefaultValue string + Except string + }{ + {1, []string{}, "[]", "[]"}, + {2, struct{}{}, "", "{}"}, + {3, struct { + Name string + Age int + }{"Hello", 12}, "", `{"Name":"hello","Age":12}`}, + {4, struct { + Name string `json:"a"` + Age int `json:"b"` + }{"Hello", 12}, "", `{"a":"hello","b":12}`}, + {5, nil, "abc", "abc"}, + {6, []int{1, 2}, "null", "[1,2]"}, + {7, []string{"a", "b"}, "null", `["a","b"]`}, + {8, 1, "[]", "1"}, + {9, "abc", "[]", `"abc"`}, + {10, nil, "[]", `[]`}, + {11, names, "[]", `[]`}, + } + for _, testCase := range testCases { + s := ToJson(testCase.Value, testCase.DefaultValue) + if !strings.EqualFold(s, testCase.Except) { + t.Errorf("%d %#v except: %s actual: %s", testCase.Number, testCase.Value, testCase.Except, s) + } + } +} + +func TestEmptyObject(t *testing.T) { + result := "{}" + if b, err := EmptyObjectRawMessage().MarshalJSON(); err == nil { + eValue := string(b) + if !strings.EqualFold(eValue, result) { + t.Errorf("Excepted value: %s, actual value: %s", eValue, result) + } + } else { + t.Errorf("Error: %s", err.Error()) + } +} + +func TestEmptyArray(t *testing.T) { + result := "[]" + if b, err := EmptyArrayRawMessage().MarshalJSON(); err == nil { + eValue := string(b) + if !strings.EqualFold(eValue, result) { + t.Errorf("Excepted value: %s, actual value: %s", eValue, result) + } + } else { + t.Errorf("Error: %s", err.Error()) + } +} + +func TestIsEmptyRawMessage(t *testing.T) { + type testCase struct { + Number int + Value json.RawMessage + Empty bool + } + v1, _ := ToRawMessage([]string{}, "[]") + v2, _ := 
ToRawMessage([]string{"a", "b"}, "[]") + v3, _ := ToRawMessage([]int{1, 2, 3}, "[]") + v4, _ := ToRawMessage(struct { + Name string + Age int + }{"John", 10}, "{}") + v5, _ := ToRawMessage(nil, "[]") + a := json.RawMessage{} + a.UnmarshalJSON([]byte("null")) + b := json.RawMessage{} + b.UnmarshalJSON([]byte("")) + c := json.RawMessage{} + c.UnmarshalJSON([]byte("[ ]")) + testCases := []testCase{ + {1, json.RawMessage{}, true}, + {2, EmptyObjectRawMessage(), true}, + {3, EmptyArrayRawMessage(), true}, + {4, v1, true}, + {5, v2, false}, + {6, v3, false}, + {7, v4, false}, + {8, v5, true}, + {9, a, true}, + {10, b, true}, + {11, c, true}, + } + + for _, tc := range testCases { + v := IsEmptyRawMessage(tc.Value) + if v != tc.Empty { + t.Errorf("%d except: %v, actual: %v", tc.Number, tc.Empty, v) + } + } +} + +func TestConvert(t *testing.T) { + testCases := []struct { + Number int + From json.RawMessage + Except any + }{ + {1, nil, struct{}{}}, + {2, EmptyArrayRawMessage(), []struct{}{}}, + {3, []byte(`{"ID":1,"Name":"hiscaler"}`), struct { + ID int + Name string + }{}}, + {4, []byte(`{"ID":1,"Name":"hiscaler","age":1}`), struct { + ID int + Name string + age int + }{}}, + } + for _, testCase := range testCases { + exceptValue := testCase.Except + err := Convert(testCase.From, &exceptValue) + assert.Equalf(t, nil, err, "Test %d", testCase.Number) + actualValue := "" + if testCase.From != nil { + actualValue = ToJson(exceptValue, "null") + } + t.Logf(` +#%d %s + ↓ + %#v`, testCase.Number, testCase.From, exceptValue) + assert.Equalf(t, string(testCase.From), actualValue, "Test %d", testCase.Number) + } +} + +func TestExtract(t *testing.T) { + testCases := []struct { + Number int + From string + Except string + HasError bool + }{ + {1, "", "", true}, + {2, "{}", "{}", false}, + {3, " {} ", "{}", false}, + {4, `{"a": 1, "b": 2}`, `{"a": 1, "b": 2}`, false}, + {5, `{"a": 1, "b": 2}}}}a`, `{"a": 1, "b": 2}`, false}, + {6, `{"a": 1, "b": 2}}}}`, `{"a": 1, "b": 2}`, false}, + 
{7, "[]", "[]", false}, + {8, "[]1[]2", "[]", false}, + } + for _, testCase := range testCases { + exceptValue := testCase.Except + actualValue, err := Extract(testCase.From) + assert.Equalf(t, testCase.HasError, err != nil, "Test HasError %d", testCase.Number) + assert.Equalf(t, exceptValue, actualValue, "Test Value %d", testCase.Number) + } +} diff --git a/jsonx/parser.go b/jsonx/parser.go new file mode 100644 index 0000000..adffc76 --- /dev/null +++ b/jsonx/parser.go @@ -0,0 +1,246 @@ +package jsonx + +import ( + "encoding/json" + "reflect" + "strconv" + "strings" + + "git.cloudyne.io/go/hiscaler-gox/bytex" + "git.cloudyne.io/go/hiscaler-gox/stringx" +) + +// Parser is a json string parse helper and not required define struct. +// You can use Find() method get the path value, and convert to string, int, int64, float32, float64, bool value. +// And you can use Exists() method check path is exists +// Usage: +// parser := jsonx.NewParser("[0,1,2]") +// parser.Find("1").Int() // Return 1, founded +// parser.Find("10", 0).Int() // Return 0 because not found, you give a default value 0 + +type Parser struct { + data reflect.Value + value reflect.Value +} + +type ParseFinder Parser + +func (pf ParseFinder) Interface() interface{} { + return pf.value.Interface() +} + +func (pf ParseFinder) String() string { + switch pf.value.Kind() { + case reflect.Invalid: + return "" + default: + return stringx.String(pf.value.Interface()) + } +} + +func (pf ParseFinder) Float32() float32 { + switch pf.value.Kind() { + case reflect.Invalid: + return 0 + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return float32(pf.value.Int()) + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + return float32(pf.value.Uint()) + case reflect.Float32, reflect.Float64: + return float32(pf.value.Float()) + case reflect.Bool: + if pf.value.Bool() { + return 1 + } + return 0 + case reflect.String: + d, err := 
strconv.ParseFloat(pf.value.String(), 32) + if err != nil { + return 0 + } + return float32(d) + default: + return 0 + } +} + +func (pf ParseFinder) Float64() float64 { + switch pf.value.Kind() { + case reflect.Invalid: + return 0 + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return float64(pf.value.Int()) + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + return float64(pf.value.Uint()) + case reflect.Float32, reflect.Float64: + return pf.value.Float() + case reflect.Bool: + if pf.value.Bool() { + return 1 + } + return 0 + case reflect.String: + d, _ := strconv.ParseFloat(pf.value.String(), 64) + return d + default: + return 0 + } +} + +func (pf ParseFinder) Int() int { + switch pf.value.Kind() { + case reflect.Invalid: + return 0 + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return int(pf.value.Int()) + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + return int(pf.value.Uint()) + case reflect.Float32, reflect.Float64: + return int(pf.value.Float()) + case reflect.Bool: + if pf.value.Bool() { + return 1 + } + return 0 + case reflect.String: + d, _ := strconv.Atoi(pf.value.String()) + return d + default: + return 0 + } +} + +func (pf ParseFinder) Int64() int64 { + switch pf.value.Kind() { + case reflect.Invalid: + return 0 + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return pf.value.Int() + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + return int64(pf.value.Uint()) + case reflect.Float32, reflect.Float64: + return int64(pf.value.Float()) + case reflect.Bool: + if pf.value.Bool() { + return 1 + } + return 0 + case reflect.String: + d, err := strconv.ParseInt(pf.value.String(), 10, 64) + if err != nil { + return 0 + } + return d + default: + return 0 + } +} + +func (pf ParseFinder) Bool() bool { + switch 
pf.value.Kind() { + case reflect.Invalid: + return false + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return pf.value.Int() > 0 + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + return pf.value.Uint() > 0 + case reflect.Float32, reflect.Float64: + return pf.value.Float() > 0 + case reflect.Bool: + return pf.value.Bool() + case reflect.String: + v, _ := strconv.ParseBool(pf.value.String()) + return v + default: + return false + } +} + +func getElement(v reflect.Value, p string) reflect.Value { + switch v.Kind() { + case reflect.Map: + vv := v.MapIndex(reflect.ValueOf(p)) + if vv.Kind() == reflect.Interface { + vv = vv.Elem() + } + return vv + case reflect.Array, reflect.Slice: + if i, err := strconv.Atoi(p); err == nil { + if i >= 0 && i < v.Len() { + v = v.Index(i) + for v.Kind() == reflect.Interface { + v = v.Elem() + } + return v + } + } + } + return reflect.Value{} +} + +func NewParser(s string) *Parser { + p := &Parser{} + return p.LoadString(s) +} + +func (p *Parser) LoadString(s string) *Parser { + return p.LoadBytes(stringx.ToBytes(s)) +} + +func (p *Parser) LoadBytes(bytes []byte) *Parser { + if bytex.IsBlank(bytes) { + return p + } + + var sd interface{} + if err := json.Unmarshal(bytes, &sd); err != nil { + return p + } + p.data = reflect.ValueOf(sd) + return p +} + +func (p Parser) Exists(path string) bool { + if !p.data.IsValid() || path == "" { + return false + } + + data := p.data + parts := strings.Split(path, ".") + n := len(parts) + for i := 0; i < n; i++ { + if data = getElement(data, parts[i]); !data.IsValid() { + return false + } + if i == n-1 { + // is last path + return true + } + } + return false +} + +func (p *Parser) Find(path string, defaultValue ...interface{}) *ParseFinder { + if len(defaultValue) > 0 { + p.value = reflect.ValueOf(defaultValue[0]) + } + if !p.data.IsValid() || path == "" { + return (*ParseFinder)(p) + } + + data := p.data + // find 
the value corresponding to the path + // if any part of path cannot be located, return the default value + parts := strings.Split(path, ".") + n := len(parts) + for i := 0; i < n; i++ { + if data = getElement(data, parts[i]); !data.IsValid() { + return (*ParseFinder)(p) + } + if i == n-1 { + // is last path + p.value = data + } + } + return (*ParseFinder)(p) +} diff --git a/jsonx/parser_test.go b/jsonx/parser_test.go new file mode 100644 index 0000000..5776259 --- /dev/null +++ b/jsonx/parser_test.go @@ -0,0 +1,78 @@ +package jsonx + +import ( + "github.com/stretchr/testify/assert" + "reflect" + "testing" +) + +func TestParser_Find(t *testing.T) { + testCases := []struct { + tag string + json string + path string + defaultValue interface{} + valueKind reflect.Kind + Except interface{} + }{ + {"string1", "", "a", "", reflect.String, ""}, + {"string2", `{"a":1}`, "a", 2, reflect.String, "1"}, + {"string3", `{"a":true}`, "a", 2, reflect.String, "true"}, + {"string4", `{"a":true}`, "a.b", false, reflect.String, "false"}, + {"string5", `{"a":{"b": {"c": 123}}}`, "a.b", "{}", reflect.String, `{"c":123}`}, + {"string6", `{"a":{"b": {"c": 123}}}`, "a.b.c", "", reflect.String, "123"}, + {"string7", `{"a":{"b": {"c": [1,2,3]}}}`, "a.b.c.0", "", reflect.String, "1"}, + {"string8", `{"a":{"b": {"c": [1,2,3]}}}`, "a.b.c.2", "", reflect.String, "3"}, + {"string9", `{"a":{"b": {"c": [1,2,3]}}}`, "", "110", reflect.String, "110"}, + {"int1", `{"a":1}`, "a", 2, reflect.Int, 1}, + {"int2", `{"a":1}`, "aa", 2, reflect.Int, 2}, + {"int641", `{"a":1}`, "a", 2, reflect.Int64, int64(1)}, + {"int641", `{"a":1}`, "aa", 2, reflect.Int64, int64(2)}, + {"bool1", `{"a":true}`, "a", false, reflect.Bool, true}, + {"bool2", `{"a":true}`, "a.b", false, reflect.Bool, false}, + {"float321", `{"a":1.23}`, "a", 0, reflect.Float32, float32(1.23)}, + {"float322", `{"a":1.23}`, "b", 0, reflect.Float32, float32(0)}, + {"float641", `{"a":1.23}`, "a", 0, reflect.Float64, 1.23}, + {"float642", `{"a":1.23}`, 
"b", 0, reflect.Float64, 0.0}, + {"interface1", `{"a":1.23}`, "b", 0, reflect.Interface, 0}, + {"interface2", `null`, "b", 0, reflect.Interface, 0}, + } + for _, testCase := range testCases { + var v interface{} + switch testCase.valueKind { + case reflect.String: + v = NewParser(testCase.json).Find(testCase.path, testCase.defaultValue).String() + case reflect.Int: + v = NewParser(testCase.json).Find(testCase.path, testCase.defaultValue).Int() + case reflect.Int64: + v = NewParser(testCase.json).Find(testCase.path, testCase.defaultValue).Int64() + case reflect.Float32: + v = NewParser(testCase.json).Find(testCase.path, testCase.defaultValue).Float32() + case reflect.Float64: + v = NewParser(testCase.json).Find(testCase.path, testCase.defaultValue).Float64() + case reflect.Bool: + v = NewParser(testCase.json).Find(testCase.path, testCase.defaultValue).Bool() + case reflect.Interface: + v = NewParser(testCase.json).Find(testCase.path, testCase.defaultValue).Interface() + } + assert.Equal(t, testCase.Except, v, testCase.tag) + } +} + +func TestParser_Exists(t *testing.T) { + testCases := []struct { + tag string + json string + path string + Except bool + }{ + {"exists1", "", "a", false}, + {"exists2", `{"a"}`, "a", false}, + {"exists3", `{"a":1}`, "a", true}, + {"exists4", `{"a":[0,1,2]}`, "a.1", true}, + } + for _, testCase := range testCases { + v := NewParser(testCase.json).Exists(testCase.path) + assert.Equal(t, testCase.Except, v, testCase.tag) + } +} diff --git a/keyx/key.go b/keyx/key.go new file mode 100644 index 0000000..e9ad638 --- /dev/null +++ b/keyx/key.go @@ -0,0 +1,70 @@ +package keyx + +import ( + "reflect" + "sort" + "strconv" + "strings" +) + +// Generate 生成 Key +func Generate(values ...interface{}) string { + var sb strings.Builder + for _, value := range values { + v := reflect.ValueOf(value) + switch v.Kind() { + case reflect.String: + if v.Len() != 0 { + sb.WriteString(v.String()) + } + case reflect.Bool: + 
sb.WriteString(strconv.FormatBool(v.Bool())) + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + if b, ok := value.(rune); ok { + sb.WriteRune(b) + } else { + sb.WriteString(strconv.FormatInt(v.Int(), 10)) + } + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + if b, ok := value.(byte); ok { + sb.WriteByte(b) + } else { + sb.WriteString(strconv.FormatUint(v.Uint(), 10)) + } + case reflect.Float32, reflect.Float64: + sb.WriteString(strconv.FormatFloat(v.Float(), 'f', -1, 64)) + case reflect.Map: + keys := make([]string, len(v.MapKeys())) + i := 0 + for _, mv := range v.MapKeys() { + keys[i] = mv.String() + i++ + } + sort.Strings(keys) + interfaces := make([]interface{}, 0) + for k := range keys { + interfaces = append(interfaces, keys[k], v.MapIndex(reflect.ValueOf(keys[k])).Interface()) + } + sb.WriteString(Generate(interfaces...)) + case reflect.Slice, reflect.Array: + interfaces := make([]interface{}, 0) + for i := 0; i < v.Len(); i++ { + interfaces = append(interfaces, v.Index(i).Interface()) + } + sb.WriteString(Generate(interfaces...)) + case reflect.Struct: + kv := map[string]interface{}{} + t := reflect.TypeOf(value) + if t.Name() != "" { + sb.WriteString(t.Name() + ":") + } + for k := 0; k < t.NumField(); k++ { + kv[t.Field(k).Name] = v.Field(k).Interface() + } + sb.WriteString(Generate(kv)) + default: + sb.WriteString(v.String()) + } + } + return sb.String() +} diff --git a/keyx/key_test.go b/keyx/key_test.go new file mode 100644 index 0000000..b2e60a3 --- /dev/null +++ b/keyx/key_test.go @@ -0,0 +1,63 @@ +package keyx + +import ( + "testing" +) + +func TestGenerate(t *testing.T) { + type User struct { + ID int + Name string + } + type testCase struct { + Number int + Values interface{} + Key string + } + + b1 := []byte("") + b2 := []byte("abc") + testCases := []testCase{ + {1, []interface{}{1, 2, 3}, "123"}, + {2, []interface{}{0, -1, 2, 3}, "0-123"}, + {3, []interface{}{1.1, 2.12, 3.123}, 
"1.12.123.123"}, + {4, []interface{}{1.1, 2.12, 3.123}, "1.12.123.123"}, + {5, []interface{}{"a", "b", "c"}, "abc"}, + {6, []interface{}{"a", "b", "c", 1, 2, 3}, "abc123"}, + {7, []interface{}{true, true, false, false}, "truetruefalsefalse"}, + {8, []interface{}{[]int{1, 2, 3}}, "123"}, + {9, []interface{}{[...]int{1, 2, 3, 4}}, "1234"}, + {10, []interface{}{struct { + Username string + Age int + }{}}, "Age0Username"}, + {11, []interface{}{struct { + Username string + Age int + }{"John", 12}}, "Age12UsernameJohn"}, + {12, []interface{}{User{ + ID: 1, + Name: "John", + }}, "User:ID1NameJohn"}, + // byte + {13, []interface{}{b1, b2}, "abc"}, + // rune + {14, []interface{}{'a', 'b', 'c'}, "abc"}, + {15, []map[string]string{{"k1": "v1", "k2": "v2"}}, "k1v1k2v2"}, + } + for _, tc := range testCases { + key := Generate(tc.Values) + if key != tc.Key { + t.Errorf("%d: except:%s actual:%s", tc.Number, tc.Key, key) + } + } +} + +func BenchmarkGenerate(b *testing.B) { + for i := 0; i < b.N; i++ { + Generate([]interface{}{struct { + Username string + Age int + }{"John", 12}}) + } +} diff --git a/mapx/map.go b/mapx/map.go new file mode 100644 index 0000000..39d3b1a --- /dev/null +++ b/mapx/map.go @@ -0,0 +1,52 @@ +package mapx + +import ( + "net/url" + "reflect" + "sort" + "strconv" +) + +// Keys 获取 map 键值(默认按照升序排列) +func Keys(m interface{}) []string { + var keys []string + vo := reflect.ValueOf(m) + if vo.Kind() == reflect.Map { + mapKeys := vo.MapKeys() + keys = make([]string, len(mapKeys)) + for k, v := range mapKeys { + var vString string + switch v.Type().Kind() { + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + vString = strconv.FormatInt(v.Int(), 10) + case reflect.Float32, reflect.Float64: + vString = strconv.FormatFloat(v.Float(), 'f', -1, 64) + case reflect.Bool: + if v.Bool() { + vString = "1" + } else { + vString = "0" + } + default: + vString = v.String() + } + keys[k] = vString + } + if len(keys) > 0 { + sort.Strings(keys) + } + } + 
return keys +} + +func StringMapStringEncode(params map[string]string) string { + if len(params) == 0 { + return "" + } + + values := url.Values{} + for k, v := range params { + values.Add(k, v) + } + return values.Encode() +} diff --git a/mapx/map_test.go b/mapx/map_test.go new file mode 100644 index 0000000..1176c1c --- /dev/null +++ b/mapx/map_test.go @@ -0,0 +1,60 @@ +package mapx + +import ( + "fmt" + "github.com/stretchr/testify/assert" + "testing" +) + +func TestKeys(t *testing.T) { + tests := []struct { + tag string + value interface{} + keys []string + }{ + {"t0", nil, nil}, + {"t1", map[string]interface{}{"a": 1, "b": 2}, []string{"a", "b"}}, + {"t2", map[string]interface{}{"b": 1, "a": 2}, []string{"a", "b"}}, + {"t3", map[string]interface{}{"a": 1, "b": 2, "": 3}, []string{"", "a", "b"}}, + {"t4", map[string]string{"a": "1", "b": "2", "": "3"}, []string{"", "a", "b"}}, + {"t4", map[int]string{1: "1", 3: "3", 2: "2"}, []string{"1", "2", "3"}}, + {"t4", map[float64]string{1.1: "1", 3: "3", 2: "2"}, []string{"1.1", "2", "3"}}, + {"t4", map[bool]string{true: "1", false: "3"}, []string{"0", "1"}}, + } + + for _, test := range tests { + keys := Keys(test.value) + v := assert.Equal(t, test.keys, keys, test.tag) + if v { + for k, value := range test.keys { + assert.Equal(t, value, keys[k], fmt.Sprintf("keys[%d]", k)) + } + } + } +} + +func BenchmarkKeys(b *testing.B) { + for i := 0; i < b.N; i++ { + Keys(map[interface{}]interface{}{"a": 1, "b": 2, "c": "cValue", "d": "dValue", 1: 1, 2: 2}) + } +} + +func TestStringMapStringEncode(t *testing.T) { + tests := []struct { + tag string + value map[string]string + expected string + }{ + {"t0", nil, ""}, + {"t1", map[string]string{"a": "1", "b": "2"}, "a=1&b=2"}, + {"t2", map[string]string{"b": "1", "a": "2"}, "a=2&b=1"}, + {"t3", map[string]string{"a": "1", "b": "2", "c": "3"}, "a=1&b=2&c=3"}, + {"t4", map[string]string{"a": "1", "b": "2", "": "3"}, "=3&a=1&b=2"}, + {"t4", map[string]string{"1": "1", "3": "3", "2": 
"2"}, "1=1&2=2&3=3"}, + } + + for _, test := range tests { + s := StringMapStringEncode(test.value) + assert.Equal(t, test.expected, s, test.tag) + } +} diff --git a/net/urlx/url.go b/net/urlx/url.go new file mode 100644 index 0000000..03389ad --- /dev/null +++ b/net/urlx/url.go @@ -0,0 +1,91 @@ +package urlx + +import ( + "net/url" + "strings" + + "git.cloudyne.io/go/hiscaler-gox/isx" +) + +type URL struct { + Path string // URL path + URL *url.URL // A url.URL represents + Invalid bool // Path is a valid url + values url.Values // Query values +} + +func NewURL(path string) *URL { + u := &URL{ + Path: path, + Invalid: false, + values: url.Values{}, + } + if v, err := url.Parse(u.Path); err == nil { + u.URL = v + u.Invalid = true + if values, err := url.ParseQuery(v.RawQuery); err == nil { + u.values = values + } + } + return u +} + +func (u URL) GetValue(key, defaultValue string) string { + v := u.values.Get(key) + if v == "" { + v = defaultValue + } + return v +} + +func (u URL) SetValue(key, value string) URL { + u.values.Set(key, value) + return u +} + +func (u URL) AddValue(key, value string) URL { + u.values.Add(key, value) + return u +} + +func (u URL) DelKey(key string) URL { + u.values.Del(key) + return u +} + +func (u URL) HasKey(key string) bool { + return u.values.Has(key) +} + +func (u URL) String() string { + s := u.URL.String() + rawQuery := u.URL.RawQuery + if rawQuery == "" { + if len(u.values) > 0 { + s += "?" 
+ u.values.Encode() + } + } else { + s = strings.Replace(s, rawQuery, u.values.Encode(), 1) + } + return s +} + +// IsAbsolute 是否为绝对地址 +func IsAbsolute(s string) bool { + if strings.HasPrefix(s, "//") { + s = "http:" + s + } + if isx.HttpURL(s) { + if u, err := url.Parse(s); err == nil { + if u.IsAbs() && len(u.Host) > 2 && strings.Index(u.Host, ".") > 0 { + return true + } + } + } + return false +} + +// IsRelative 是否为相对地址 +func IsRelative(url string) bool { + return !IsAbsolute(url) +} diff --git a/net/urlx/url_test.go b/net/urlx/url_test.go new file mode 100644 index 0000000..6962fe6 --- /dev/null +++ b/net/urlx/url_test.go @@ -0,0 +1,88 @@ +package urlx + +import ( + "github.com/stretchr/testify/assert" + "testing" +) + +func TestURL_AddValue(t *testing.T) { + type testCase struct { + Number int + Path string + Values map[string]string + Except string + } + testCases := []testCase{ + {1, "https://www.example.com/a/b/c/1.txt?a=1&b=2", map[string]string{"a": "11", "b": "22"}, "https://www.example.com/a/b/c/1.txt?a=11&b=22"}, + {1, "https://www.example.com/a/b/c/1.txt?a=1&b=2&c=3", map[string]string{"a": "11", "c": "33"}, "https://www.example.com/a/b/c/1.txt?a=11&b=2&c=33"}, + {2, "https://www.example.com/a/b/c/1.txt?a=1&b=2#abc", map[string]string{"a": "11"}, "https://www.example.com/a/b/c/1.txt?a=11&b=2#abc"}, + {3, "https://www.example.com/a/b/c/1.txt?a=1&b=2#abc", map[string]string{"A": "11"}, "https://www.example.com/a/b/c/1.txt?A=11&a=1&b=2#abc"}, + {4, "https://www.example.com/a/b/c/1.txt?b=1&a=2#abc", map[string]string{"A": "11"}, "https://www.example.com/a/b/c/1.txt?A=11&a=2&b=1#abc"}, + {5, "https://www.example.com", map[string]string{"A": "11"}, "https://www.example.com?A=11"}, + {6, "https://www.example.com/", map[string]string{"A": "11"}, "https://www.example.com/?A=11"}, + } + + for _, tc := range testCases { + url := NewURL(tc.Path) + for k, v := range tc.Values { + url.SetValue(k, v) + } + s := url.String() + if s != tc.Except { + t.Errorf("%d 
except: %s, actual: %s", tc.Number, tc.Except, s) + } + } +} + +func TestURL_DeleteValue(t *testing.T) { + type testCase struct { + Number int + Path string + DeleteKeys []string + Except string + } + testCases := []testCase{ + {1, "https://www.example.com/a/b/c/1.txt?a=1&b=2#abc", []string{"a", "b"}, "https://www.example.com/a/b/c/1.txt?#abc"}, + {1, "https://www.example.com/a/b/c/1.txt?a=1&b=2#abc", []string{"a"}, "https://www.example.com/a/b/c/1.txt?b=2#abc"}, + {2, "https://www.example.com/a/b/c/1.txt", []string{"a", "b"}, "https://www.example.com/a/b/c/1.txt"}, + {2, "https://www/a/b/c/1.txt", []string{"a", "b"}, "https://www/a/b/c/1.txt"}, + } + + for _, tc := range testCases { + url := NewURL(tc.Path) + for _, v := range tc.DeleteKeys { + url.DelKey(v) + } + s := url.String() + if s != tc.Except { + t.Errorf("%d except: %s, actual: %s", tc.Number, tc.Except, s) + } + } +} + +func TestIsAbsolute(t *testing.T) { + testCases := []struct { + tag string + url string + isAbs bool + }{ + {"t0.1", "https://www.a.com", true}, + {"t0.2", "http://www.a.com", true}, + {"t0.3", "//www.a.com", true}, + {"t0.4", "//a.b", true}, + {"t0.5", "//abc", false}, + {"t0.6", "//abc...", false}, + {"t0.7", "//.a.b", false}, + {"t0.8", "//a.b..", false}, + + {"t1.1", "httpa.com", false}, + {"t1.2", "httpa.com//", false}, + {"t1.3", "//", false}, + {"t1.4", "//a", false}, + {"t1.5", "//....a", false}, + } + for _, testCase := range testCases { + isAbs := IsAbsolute(testCase.url) + assert.Equal(t, testCase.isAbs, isAbs, testCase.tag) + } +} diff --git a/nullx/string.go b/nullx/string.go new file mode 100644 index 0000000..719a063 --- /dev/null +++ b/nullx/string.go @@ -0,0 +1,18 @@ +package nullx + +import ( + "gopkg.in/guregu/null.v4" + "strings" +) + +func StringFrom(s string) null.String { + s = strings.TrimSpace(s) + if s == "" { + return NullString() + } + return null.NewString(s, true) +} + +func NullString() null.String { + return null.NewString("", false) +} diff --git 
a/nullx/time.go b/nullx/time.go new file mode 100644 index 0000000..050983c --- /dev/null +++ b/nullx/time.go @@ -0,0 +1,17 @@ +package nullx + +import ( + "gopkg.in/guregu/null.v4" + "time" +) + +func TimeFrom(t time.Time) null.Time { + if t.IsZero() { + return NullTime() + } + return null.TimeFrom(t) +} + +func NullTime() null.Time { + return null.NewTime(time.Time{}, false) +} diff --git a/pathx/path.go b/pathx/path.go new file mode 100644 index 0000000..3d9dc1c --- /dev/null +++ b/pathx/path.go @@ -0,0 +1,18 @@ +package pathx + +import ( + "path" + "strings" +) + +func FilenameWithoutExt(s string) string { + if s == "" { + return "" + } + + filename := path.Base(s) + if ext := path.Ext(s); ext != "" { + filename = strings.TrimSuffix(filename, ext) + } + return filename +} diff --git a/pathx/path_test.go b/pathx/path_test.go new file mode 100644 index 0000000..ccaead7 --- /dev/null +++ b/pathx/path_test.go @@ -0,0 +1,25 @@ +package pathx + +import ( + "github.com/stretchr/testify/assert" + "testing" +) + +func TestFilenameWithoutExt(t *testing.T) { + testCases := []struct { + tag string + path string + expected string + }{ + {"t1", "a.jpg", "a"}, + {"t2", "/a/b/c.jpg", "c"}, + {"t3", "/a/b/c", "c"}, + {"t4", "/a/b/c/", "c"}, + {"t5", "/a/b/c/中文.jpg", "中文"}, + {"t5", "https://www.example.com/a/b/c/中文.jpg", "中文"}, + } + for _, testCase := range testCases { + v := FilenameWithoutExt(testCase.path) + assert.Equal(t, testCase.expected, v, testCase.tag) + } +} diff --git a/randx/rand.go b/randx/rand.go new file mode 100644 index 0000000..77d5701 --- /dev/null +++ b/randx/rand.go @@ -0,0 +1,46 @@ +package randx + +import ( + "crypto/rand" + "math/big" + "strings" +) + +const ( + randNumberChars = "0123456789" + randLetterChars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" +) + +func generateValues(str string, n int, upper bool) string { + if n <= 0 { + return "" + } + + sb := strings.Builder{} + sb.Grow(n) + bigInt := big.NewInt(int64(len(str))) + for i := 
0; i < n; i++ { + randomInt, _ := rand.Int(rand.Reader, bigInt) + sb.WriteByte(str[randomInt.Int64()]) + } + s := sb.String() + if upper { + s = strings.ToUpper(s) + } + return s +} + +// Letter Generate letter rand string +func Letter(n int, upper bool) string { + return generateValues(randLetterChars, n, upper) +} + +// Number Generate number rand string +func Number(n int) string { + return generateValues(randNumberChars, n, false) +} + +// Any Generate number and letter combined string +func Any(n int) string { + return generateValues(randLetterChars+randNumberChars, n, false) +} diff --git a/randx/rand_test.go b/randx/rand_test.go new file mode 100644 index 0000000..c4f9b76 --- /dev/null +++ b/randx/rand_test.go @@ -0,0 +1,17 @@ +package randx + +import ( + "testing" +) + +func BenchmarkNumber(b *testing.B) { + for i := 0; i < b.N; i++ { + Number(10) + } +} + +func BenchmarkAny(b *testing.B) { + for i := 0; i < b.N; i++ { + Any(10) + } +} diff --git a/setx/set.go b/setx/set.go new file mode 100644 index 0000000..2ba0916 --- /dev/null +++ b/setx/set.go @@ -0,0 +1,70 @@ +package setx + +import ( + "strings" + + gox "git.cloudyne.io/go/hiscaler-gox" + "git.cloudyne.io/go/hiscaler-gox/inx" +) + +// ToSet change slice to unique values +func ToSet[T gox.Number | string | bool | byte | rune](values []T) []T { + if len(values) <= 1 { + return values + } + + uniqueValues := make([]T, 0) + kv := make(map[T]struct{}, len(values)) + for _, value := range values { + if _, ok := kv[value]; !ok { + kv[value] = struct{}{} + uniqueValues = append(uniqueValues, value) + } + } + return uniqueValues +} + +func ToStringSet(values []string, caseSensitive bool) []string { + if len(values) <= 1 { + return values + } + + m := make(map[string]string, 0) + for _, value := range values { + value = strings.TrimSpace(value) + if value != "" { + fixedValue := value + if !caseSensitive { + fixedValue = strings.ToLower(fixedValue) + } + if _, ok := m[fixedValue]; !ok { + m[fixedValue] = value 
+ } + } + } + if len(m) == 0 { + return nil + } + + sets := make([]string, len(m)) + i := 0 + for _, v := range m { + sets[i] = v + i++ + } + return sets +} + +func ToIntSet(values []int) []int { + if len(values) <= 1 { + return values + } + + sets := make([]int, 0) + for _, value := range values { + if !inx.IntIn(value, sets...) { + sets = append(sets, value) + } + } + return sets +} diff --git a/setx/set_test.go b/setx/set_test.go new file mode 100644 index 0000000..163e814 --- /dev/null +++ b/setx/set_test.go @@ -0,0 +1,92 @@ +package setx + +import ( + "github.com/stretchr/testify/assert" + "testing" +) + +func TestToSet(t *testing.T) { + assert.ElementsMatch(t, []int{1, 2, 3}, ToSet([]int{1, 2, 3}), "int1") + assert.ElementsMatch(t, []int{1, 2, 3}, ToSet([]int{1, 1, 2, 2, 3, 3}), "int2") + assert.ElementsMatch(t, []float32{1, 2, 3}, ToSet([]float32{1, 2, 3}), "float321") + assert.ElementsMatch(t, []float64{1, 2, 3}, ToSet([]float64{1, 1, 2, 2, 3, 3}), "float641") + assert.ElementsMatch(t, []string{"A", "B", "C"}, ToSet([]string{"A", "B", "C", "C", "B", "A"}), "string1") + assert.ElementsMatch(t, []string{"A", " A ", "B", "C"}, ToSet([]string{" A ", "B", "C", "C", "B", "A"}), "string2") +} + +func TestToStringSet(t *testing.T) { + testCases := []struct { + A []string + B []string + Len int + Values []string + }{ + {[]string{"1", "2", "3"}, []string{"0", "1", "4"}, 5, []string{"0", "1", "2", "3", "4"}}, + {[]string{"1", "1", "1"}, []string{"0", "1", "2"}, 3, []string{"0", "1", "2"}}, + {[]string{" ", "1", "1", "1"}, []string{"0", "1", "2"}, 3, []string{"0", "1", "2"}}, + {[]string{"\tabc\t", " abc ", "1", "1", "1"}, []string{"0", "1", "2"}, 4, []string{"0", "1", "2", "abc"}}, + {[]string{"\tabc\t", " abc ", "1", "1", "1", "ABC"}, []string{"0", "1", "2"}, 5, []string{"0", "1", "2", "abc", "ABC"}}, + } + + for _, testCase := range testCases { + c := ToStringSet(append(testCase.A, testCase.B...), true) + if len(c) != testCase.Len { + t.Errorf("Except %d, actual 
%d", testCase.Len, len(c)) + } + for _, value := range testCase.Values { + exists := false + for _, v := range c { + if v == value { + exists = true + break + } + } + if !exists { + t.Errorf("%s not in %#v", value, testCase.Values) + } + } + } +} + +func BenchmarkToStringSet(b *testing.B) { + for i := 0; i < b.N; i++ { + ToStringSet([]string{"A", "B", "c", "C", "a", "d", "d", "e", "fgh", "FGH", "fGH", "fgH"}, false) + } +} + +func TestIntSliceToSet(t *testing.T) { + testCases := []struct { + A []int + B []int + Len int + Values []int + }{ + {[]int{1, 2, 3}, []int{0, 1, 4}, 5, []int{0, 1, 2, 3, 4}}, + {[]int{1, 1, 1}, []int{0, 1, 2}, 3, []int{0, 1, 2}}, + } + + for _, testCase := range testCases { + c := ToIntSet(append(testCase.A, testCase.B...)) + if len(c) != testCase.Len { + t.Errorf("Except %d, actual %d", testCase.Len, len(c)) + } + for _, value := range testCase.Values { + exists := false + for _, v := range c { + if v == value { + exists = true + break + } + } + if !exists { + t.Errorf("%d not in %#v", value, testCase.Values) + } + } + } +} + +func BenchmarkToIntSet(b *testing.B) { + for i := 0; i < b.N; i++ { + ToIntSet([]int{1, 2, 3, 3, 45, 5, 6, 56, 56, 56, 77, 6, 7, 67, 678, 78, 78, 8, 78}) + } +} diff --git a/slicex/slice.go b/slicex/slice.go new file mode 100644 index 0000000..7b9fcd2 --- /dev/null +++ b/slicex/slice.go @@ -0,0 +1,241 @@ +package slicex + +import ( + "strings" + + gox "git.cloudyne.io/go/hiscaler-gox" +) + +// Map values all value execute f function, and return a new slice +// +// Example +// +// Map([]string{"A", "B", "C"}, func(v string) string { +// return strings.ToLower(v) +// }) +// // Output: ["a", "b", "c"] +func Map[T comparable](values []T, f func(v T) T) []T { + items := make([]T, len(values)) + for i, v := range values { + items[i] = f(v) + } + return items +} + +// Filter return matched f function condition value +// +// Example: +// +// Filter([]int{0, 1, 2, 3}, func(v int) bool { +// return v > 0 +// }) +// // Output: 
[1, 2, 3] +func Filter[T comparable](values []T, f func(v T) bool) []T { + items := make([]T, 0) + for _, v := range values { + if ok := f(v); ok { + items = append(items, v) + } + } + return items +} + +func ToInterface[T gox.Number | ~string](values []T) []interface{} { + if values == nil || len(values) == 0 { + return []interface{}{} + } + + ifs := make([]interface{}, len(values)) + for i, value := range values { + ifs[i] = value + } + return ifs +} + +// StringToInterface Change string slice to interface slice +func StringToInterface(values []string) []interface{} { + return ToInterface(values) +} + +// IntToInterface Change int slice to interface slice +func IntToInterface(values []int) []interface{} { + return ToInterface(values) +} + +// StringSliceEqual Check a, b is equal +func StringSliceEqual(a, b []string, caseSensitive, ignoreEmpty, trim bool) bool { + if a == nil && b == nil { + return true + } else if a == nil || b == nil { + return false + } + + if !caseSensitive || ignoreEmpty || trim { + fixFunc := func(ss []string) []string { + if len(ss) == 0 { + return ss + } + values := make([]string, 0) + for _, s := range ss { + if trim { + s = strings.TrimSpace(s) + } + if s == "" && ignoreEmpty { + continue + } + if !caseSensitive { + s = strings.ToUpper(s) + } + values = append(values, s) + } + return values + } + a = fixFunc(a) + b = fixFunc(b) + } + if len(a) != len(b) { + return false + } + + for _, av := range a { + exists := false + for _, bv := range b { + if av == bv { + exists = true + break + } + } + if !exists { + return false + } + } + return true +} + +// IntSliceEqual Check a, b is equal +func IntSliceEqual(a, b []int) bool { + if a == nil && b == nil { + return true + } else if a == nil || b == nil || len(a) != len(b) { + return false + } + + for _, av := range a { + exists := false + for _, bv := range b { + if av == bv { + exists = true + break + } + } + if !exists { + return false + } + } + return true +} + +func StringSliceReverse(ss 
[]string) []string { + n := len(ss) + if n <= 1 { + return ss + } + + vv := make([]string, len(ss)) + copy(vv, ss) + for k1 := 0; k1 < n/2; k1++ { + k2 := n - k1 - 1 + vv[k1], vv[k2] = vv[k2], vv[k1] + } + return vv +} + +func IntSliceReverse(ss []int) []int { + n := len(ss) + if n <= 1 { + return ss + } + + vv := make([]int, len(ss)) + copy(vv, ss) + for k1 := 0; k1 < n/2; k1++ { + k2 := n - k1 - 1 + vv[k1], vv[k2] = vv[k2], vv[k1] + } + return vv +} + +// Diff return a slice in ss[0] and not in ss[1:] +func Diff[T comparable](values ...[]T) []T { + diffValues := make([]T, 0) + n := len(values) + if n == 0 || values[0] == nil { + return diffValues + } else if n == 1 { + return values[0] + } else { + items := make(map[T]struct{}, 0) + for _, vs := range values[1:] { + for _, v := range vs { + items[v] = struct{}{} + } + } + for _, v := range values[0] { + if _, ok := items[v]; !ok { + diffValues = append(diffValues, v) + } + } + } + return diffValues +} + +func StringSliceDiff(ss ...[]string) []string { + diffValues := make([]string, 0) + if len(ss) == 0 || ss[0] == nil { + return diffValues + } else if len(ss) == 1 { + return ss[0] + } else { + for _, v1 := range ss[0] { + exists := false + for _, items := range ss[1:] { + for _, v2 := range items { + if strings.EqualFold(v1, v2) { + exists = true + break + } + } + if exists { + break + } + } + if !exists { + diffValues = append(diffValues, v1) + } + } + } + return diffValues +} + +func IntSliceDiff(ss ...[]int) []int { + return Diff(ss...) 
+} + +// Chunk chunks a slice by size +func Chunk[T comparable](items []T, size int) [][]T { + chunkItems := make([][]T, 0) + n := len(items) + if items == nil || n == 0 { + return chunkItems + } else if size <= 0 { + return [][]T{items} + } + for i := 0; i < n; i += size { + end := i + size + if end > n { + end = n + } + chunkItems = append(chunkItems, items[i:end]) + } + return chunkItems +} diff --git a/slicex/slice_test.go b/slicex/slice_test.go new file mode 100644 index 0000000..bec642e --- /dev/null +++ b/slicex/slice_test.go @@ -0,0 +1,219 @@ +package slicex + +import ( + "github.com/stretchr/testify/assert" + "testing" +) + +func TestMap(t *testing.T) { + a := []int{0, 1} + b := Map(a, func(v int) int { + return v + 1 + }) + assert.Equal(t, []int{0, 1}, a, "map.int") + assert.Equal(t, []int{1, 2}, b, "map.int") + + a1 := []string{"a", "b"} + b1 := Map(a1, func(v string) string { + return v + "1" + }) + assert.Equal(t, []string{"a", "b"}, a1, "map.string") + assert.Equal(t, []string{"a1", "b1"}, b1, "map.string") +} + +func TestFilter(t *testing.T) { + assert.Equal(t, []int{1, 2, 3}, Filter([]int{0, 1, 2, 3}, func(v int) bool { + return v > 0 + }), "map.int") + assert.Equal(t, []string{"c"}, Filter([]string{"a", "b", "c", "a", "b"}, func(v string) bool { + return v == "c" + }), "map.string") +} + +func TestStringToInterface(t *testing.T) { + tests := []struct { + tag string + input []string + expected []interface{} + }{ + {"t0", []string{"a", "b", "c"}, []interface{}{"a", "b", "c"}}, + {"t1", nil, []interface{}{}}, + } + for _, test := range tests { + v := StringToInterface(test.input) + assert.Equal(t, test.expected, v, test.tag) + } +} + +func BenchmarkStringToInterface(b *testing.B) { + for n := 0; n < b.N; n++ { + StringToInterface([]string{"a", "b", "c"}) + } +} + +func TestStringSliceEqual(t *testing.T) { + testCases := []struct { + A []string + B []string + CaseSensitive bool + IgnoreEmpty bool + Trim bool + Except bool + }{ + {[]string{}, 
[]string{}, false, true, true, true}, + {[]string{"a", "b", "c"}, []string{"a", "b", "c"}, true, false, true, true}, + {[]string{"a", "b", "c"}, []string{"a", "b ", " c"}, true, false, true, true}, + {[]string{"a", "b", "c"}, []string{"a", "b ", " c"}, true, false, false, false}, + {[]string{"a", "b", "c", ""}, []string{"a", "b ", " c"}, true, false, true, false}, + {[]string{"a", "b", "c", ""}, []string{"a", "b ", " c"}, true, true, true, true}, + {[]string{"a", "b", "c", ""}, []string{"a", "b ", " c", ""}, true, false, true, true}, + {[]string{"A", "B", "C"}, []string{"a", "b", "c"}, true, true, true, false}, + {[]string{"A", "B", "C"}, []string{"a", "b", "c"}, false, true, true, true}, + {[]string{"A", "B", "C"}, []string{"b", "c", "a"}, false, true, true, true}, + {[]string{" ", "", " "}, []string{""}, false, true, true, true}, + {[]string{}, []string{" ", ""}, false, true, true, true}, + {[]string{}, []string{"a", "b"}, false, true, true, false}, + {nil, []string{}, false, true, true, false}, + {[]string{}, nil, false, true, true, false}, + {nil, nil, false, true, true, true}, + } + for i, testCase := range testCases { + equal := StringSliceEqual(testCase.A, testCase.B, testCase.CaseSensitive, testCase.IgnoreEmpty, testCase.Trim) + if equal != testCase.Except { + t.Errorf("%d except %v actual %v", i, testCase.Except, equal) + } + } +} + +func TestIntSliceEqual(t *testing.T) { + testCases := []struct { + A []int + B []int + Except bool + }{ + {[]int{}, []int{}, true}, + {[]int{0, 1, 2}, []int{0, 1, 2}, true}, + {[]int{0, 1, 2}, []int{2, 1, 0}, true}, + {[]int{0, 1, 2}, []int{1, 2}, false}, + {[]int{0, 1, 1, 2}, []int{0, 1, 2}, false}, + {[]int{0, 1, 1, 2}, []int{0, 1, 2, 1}, true}, + {nil, []int{}, false}, + {nil, nil, true}, + {[]int{}, nil, false}, + } + + for i, testCase := range testCases { + equal := IntSliceEqual(testCase.A, testCase.B) + if equal != testCase.Except { + t.Errorf("%d except %v actual %v", i, testCase.Except, equal) + } + } +} + +func 
TestStringSliceReverse(t *testing.T) { + testCases := []struct { + Before []string + After []string + }{ + {[]string{"a"}, []string{"a"}}, + {[]string{"a", "b"}, []string{"b", "a"}}, + {[]string{"a", "b", "c"}, []string{"c", "b", "a"}}, + } + + for _, testCase := range testCases { + values := StringSliceReverse(testCase.Before) + if len(values) != len(testCase.After) { + t.Errorf("%#v reverse after value except: %#v, actual: %#v", testCase.Before, testCase.After, values) + } else { + for j, v := range values { + if testCase.After[j] != v { + t.Errorf("%#v reverse after value except: %#v, actual: %#v", testCase.Before, testCase.After, values) + break + } + } + } + } +} + +func TestStringSliceDiff(t *testing.T) { + testCases := []struct { + Number int + OriginalValues [][]string + DiffValue []string + }{ + {1, [][]string{{"a", "b", "c"}, {"a", "b", "d"}}, []string{"c"}}, + {1, [][]string{{"a", "b", "c"}, {"a"}}, []string{"b", "c"}}, + {2, [][]string{{"a", "b", "d"}, {"a", "b", "c"}}, []string{"d"}}, + {3, [][]string{{"a", "b", "c"}, {"a", "b", "c"}}, []string{}}, + {4, [][]string{{"a", "b", ""}, {"a", "b", "c"}}, []string{""}}, + {5, [][]string{{"a", "b", "c"}, {"a", "b"}, {"c"}}, []string{}}, + {6, [][]string{{"a"}, {"b"}, {"c", "c1"}, {"d"}}, []string{"a"}}, + {7, [][]string{nil, {"a"}, {"b"}, {"c", "c1"}, {"d"}}, []string{}}, + {8, [][]string{nil}, []string{}}, + {9, [][]string{nil, nil, nil}, []string{}}, + } + + for _, testCase := range testCases { + values := StringSliceDiff(testCase.OriginalValues...) 
+ if !StringSliceEqual(values, testCase.DiffValue, true, false, true) { + t.Errorf("%d: diff values except: %#v, actual: %#v", testCase.Number, testCase.DiffValue, values) + } + } +} + +func TestIntSliceDiff(t *testing.T) { + testCases := []struct { + Number int + OriginalValues [][]int + DiffValue []int + }{ + {1, [][]int{{1, 2, 3}, {1, 2, 4}}, []int{3}}, + {2, [][]int{{1, 2, 3}, {1, 2, 2, 3}, {3, 4, 5}}, []int{}}, + {3, [][]int{{1, 2, 3}, {1}, {2}, {3}}, []int{}}, + {4, [][]int{{1, 2, 3}, {1, 2, 4, 0, 2, 1}}, []int{3}}, + {5, [][]int{{1, 2, 2, 3}, {1}}, []int{2, 2, 3}}, + {6, [][]int{}, []int{}}, + {7, [][]int{nil, {1, 2, 3}}, []int{}}, + {8, [][]int{nil, nil, {1, 2, 3}}, []int{}}, + } + + for _, testCase := range testCases { + values := IntSliceDiff(testCase.OriginalValues...) + if !IntSliceEqual(values, testCase.DiffValue) { + t.Errorf("%d: diff values except: %#v, actual: %#v", testCase.Number, testCase.DiffValue, values) + } + } +} + +func TestToInterface(t *testing.T) { + type args struct { + values interface{} + } + tests := []struct { + name string + args args + want []interface{} + }{ + {"t1", args{[]int{1, 2, 3}}, []interface{}{1, 2, 3}}, + {"t2", args{[]string{"A", "B", "C"}}, []interface{}{"A", "B", "C"}}, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + var value interface{} + switch tt.args.values.(type) { + case []string: + value = ToInterface(tt.args.values.([]string)) + case []int: + value = ToInterface(tt.args.values.([]int)) + } + assert.Equalf(t, tt.want, value, "ToInterface(%v)", tt.args.values) + }) + } +} + +func TestChunk(t *testing.T) { + assert.ElementsMatch(t, Chunk([]int{1, 2, 3}, 0), [][]int{{1, 2, 3}}, "int0") + assert.ElementsMatch(t, Chunk([]int{1, 2, 3}, 1), [][]int{{1}, {2}, {3}}, "int1") + assert.ElementsMatch(t, Chunk([]int{1, 2, 3}, 2), [][]int{{1, 2}, {3}}, "int2") + assert.ElementsMatch(t, Chunk([]int{1, 2, 3}, 3), [][]int{{1, 2, 3}}, "int3") +} diff --git a/spreedsheetx/column.go 
b/spreedsheetx/column.go new file mode 100644 index 0000000..0b65371 --- /dev/null +++ b/spreedsheetx/column.go @@ -0,0 +1,192 @@ +package spreedsheetx + +import ( + "errors" + "fmt" + "math" + "regexp" + "strings" +) + +// https://support.microsoft.com/en-us/office/excel-specifications-and-limits-1672b34d-7043-467e-8e27-269d656771c3?ui=en-us&rs=en-us&ad=us +// Max index is 16384 XFD + +var ( + rxColumnName = regexp.MustCompile("^[A-Za-z]{1,3}$") +) + +const ( + minNumber = 1 + maxNumber = 16384 + a = 64 +) + +type Column struct { + startName string // 最开始操作的列 + endName string // 最远到达的列 + current string // 当前列 +} + +func isValidName(name string) bool { + return rxColumnName.MatchString(name) && toNumber(name) <= maxNumber +} + +func reverse(name string) []rune { + d := []rune(name) + for i, j := 0, len(d)-1; i < j; i, j = i+1, j-1 { + d[i], d[j] = d[j], d[i] + } + return d +} + +func toNumber(name string) int { + name = strings.ToUpper(name) + switch len(name) { + case 0: + return 0 + case 1: + return int(rune(name[0])) - a + default: + number := 0 + for i, r := range reverse(name) { + if i == 0 { + number += int(r) - a + } else { + number += (int(r) - a) * int(math.Pow(26, float64(i))) + } + } + return number + } +} + +func NewColumn(name string) *Column { + name = strings.ToUpper(name) + if !isValidName(name) { + panic("invalid column name") + } + + return &Column{ + startName: name, + endName: name, + current: name, + } +} + +// ToFirst 到第一列,总是返回 A 列 +func (c *Column) ToFirst() *Column { + c.current = "A" + return c +} + +// Next 当前列的下一列 +func (c *Column) Next() (*Column, error) { + return c.RightShift(1) +} + +func (c *Column) Prev() (*Column, error) { + return c.LeftShift(1) +} + +// StartName 返回最开始的列名 +func (c Column) StartName() string { + return c.startName +} + +func (c *Column) setEndName(name string) *Column { + if c.endName < name || len(c.endName) < len(name) { + c.endName = name + } + return c +} + +// EndName 返回最远到达的列名 +func (c Column) EndName() 
string { + return c.endName +} + +// Name 当前列名 +func (c Column) Name() string { + return c.current +} + +// NameWithRow 带行号的列名,比如:A1 +func (c Column) NameWithRow(row int) string { + return fmt.Sprintf("%s%d", c.current, row) +} + +// Reset 重置到最开始的列(NewColumn 创建时的列) +func (c *Column) Reset() *Column { + c.current = c.startName + c.endName = c.startName + return c +} + +// To 跳转到指定的列 +func (c *Column) To(name string) (*Column, error) { + name = strings.ToUpper(name) + if !isValidName(name) { + return c, fmt.Errorf("invalid column name %s", name) + } + c.current = name + c.setEndName(name) + return c, nil +} + +func (c *Column) RightShift(steps int) (*Column, error) { + if steps <= 0 { + return c, nil + } + return c.shift(steps) +} + +func (c *Column) LeftShift(steps int) (*Column, error) { + if steps <= 0 { + return c, nil + } + return c.shift(-steps) +} + +// RightShift 基于当前位置右移多少列 +func (c *Column) shift(steps int) (*Column, error) { + if steps == 0 { + return c, nil + } + + number := toNumber(c.current) + number += steps + if number > maxNumber { + return c, errors.New("out of max columns") + } else if number < minNumber { + return c, errors.New("out of min columns") + } + + sb := strings.Builder{} + sb.Grow(3) // Max 3 letters + times := 0 + for { + times++ + quotient := number / 26 + remainder := number % 26 + if remainder == 0 { + sb.WriteRune('Z') + } else { + sb.WriteRune(rune(a + remainder)) + } + if quotient == 0 { + break + } else if quotient <= 26 { + if quotient != 1 || (times >= 1 && remainder != 0) { + sb.WriteRune(rune(a + quotient)) + } + break + } + number = quotient + } + + c.current = string(reverse(sb.String())) + if steps > 0 { + // Is right shift + c.setEndName(c.current) + } + return c, nil +} diff --git a/spreedsheetx/column_test.go b/spreedsheetx/column_test.go new file mode 100644 index 0000000..10d5c52 --- /dev/null +++ b/spreedsheetx/column_test.go @@ -0,0 +1,88 @@ +package spreedsheetx + +import ( + "github.com/stretchr/testify/assert" + 
"testing" +) + +func TestNewColumn(t *testing.T) { + column := NewColumn("A") + assert.Equal(t, "A", column.Name()) + column.Next() + assert.Equal(t, "B", column.Name()) + column.RightShift(2) + assert.Equal(t, "D", column.Name()) + column.To("F") + assert.Equal(t, "F", column.Name()) + column.Next() + assert.Equal(t, "G", column.Name()) + _, err := column.To("ZZZ") + assert.Equal(t, true, err != nil, "err") +} + +func TestNewColumn2(t *testing.T) { + column := NewColumn("A") + column.To("ZZ") + assert.Equal(t, "ZZ", column.Name()) + column.RightShift(2) + assert.Equal(t, "AAB", column.Name()) +} + +func TestNewColumn3(t *testing.T) { + column := NewColumn("ABC") + assert.Equal(t, "ABC", column.Name()) + column.RightShift(2) + assert.Equal(t, "ABE", column.Name()) + column.RightShift(53) + assert.Equal(t, "ADF", column.Name()) + column.To("A") + column.Next() + assert.Equal(t, "B", column.Name()) + column.Next() + assert.Equal(t, "C", column.Name()) + column.To("A") + column.RightShift(26) + assert.Equal(t, "AA", column.Name()) +} + +func TestNewColumn4(t *testing.T) { + column := NewColumn("A") + column.RightShift(1000) + assert.Equal(t, "ALM", column.Name()) + column.To("A") + column.RightShift(25) + assert.Equal(t, "Z", column.Name()) + column.RightShift(1) + assert.Equal(t, "AA", column.Name()) + column.To("A") + column.RightShift(maxNumber - 1) + assert.Equal(t, "XFD", column.Name()) +} + +func TestNewColumn5(t *testing.T) { + column := NewColumn("A") + column.Next() + column.RightShift(26) + assert.Equal(t, "AB", column.Name()) + column.Reset() + assert.Equal(t, "A", column.Name()) + assert.Equal(t, "A", column.StartName()) + assert.Equal(t, "A", column.EndName()) +} + +func TestNewLeftShift(t *testing.T) { + column := NewColumn("Z") + column.Prev() + assert.Equal(t, "Y", column.Name()) + column.LeftShift(23) + assert.Equal(t, "B", column.Name()) + column.LeftShift(1) + assert.Equal(t, "A", column.Name()) + _, err := column.LeftShift(1) + assert.Equal(t, err 
!= nil, true, "err") + column.Reset() + assert.Equal(t, "Z", column.Name()) + assert.Equal(t, "Z", column.StartName()) + column.To("AA") + assert.Equal(t, "AA", column.EndName()) +} diff --git a/stringx/string.go b/stringx/string.go new file mode 100644 index 0000000..9572081 --- /dev/null +++ b/stringx/string.go @@ -0,0 +1,554 @@ +package stringx + +import ( + "bytes" + "encoding/json" + "fmt" + "reflect" + "regexp" + "regexp/syntax" + "sort" + "strconv" + "strings" + "unicode" + "unsafe" + + "git.cloudyne.io/go/hiscaler-gox/slicex" + "golang.org/x/text/width" +) + +var ( + rxEmoji = regexp.MustCompile(`[\x{1F3F4}](?:\x{E0067}\x{E0062}\x{E0077}\x{E006C}\x{E0073}\x{E007F})|[\x{1F3F4}](?:\x{E0067}\x{E0062}\x{E0073}\x{E0063}\x{E0074}\x{E007F})|[\x{1F3F4}](?:\x{E0067}\x{E0062}\x{E0065}\x{E006E}\x{E0067}\x{E007F})|[\x{1F3F4}](?:\x{200D}\x{2620}\x{FE0F})|[\x{1F3F3}](?:\x{FE0F}\x{200D}\x{1F308})|[\x{0023}\x{002A}\x{0030}\x{0031}\x{0032}\x{0033}\x{0034}\x{0035}\x{0036}\x{0037}\x{0038}\x{0039}](?:\x{FE0F}\x{20E3})|[\x{1F441}](?:\x{FE0F}\x{200D}\x{1F5E8}\x{FE0F})|[\x{1F468}\x{1F469}](?:\x{200D}\x{1F467}\x{200D}\x{1F467})|[\x{1F468}\x{1F469}](?:\x{200D}\x{1F467}\x{200D}\x{1F466})|[\x{1F468}\x{1F469}](?:\x{200D}\x{1F467})|[\x{1F468}\x{1F469}](?:\x{200D}\x{1F466}\x{200D}\x{1F466})|[\x{1F468}\x{1F469}](?:\x{200D}\x{1F466})|[\x{1F468}](?:\x{200D}\x{1F468}\x{200D}\x{1F467}\x{200D}\x{1F467})|[\x{1F468}](?:\x{200D}\x{1F468}\x{200D}\x{1F466}\x{200D}\x{1F466})|[\x{1F468}](?:\x{200D}\x{1F468}\x{200D}\x{1F467}\x{200D}\x{1F466})|[\x{1F468}](?:\x{200D}\x{1F468}\x{200D}\x{1F467})|[\x{1F468}](?:\x{200D}\x{1F468}\x{200D}\x{1F466})|[\x{1F468}\x{1F469}](?:\x{200D}\x{1F469}\x{200D}\x{1F467}\x{200D}\x{1F467})|[\x{1F468}\x{1F469}](?:\x{200D}\x{1F469}\x{200D}\x{1F466}\x{200D}\x{1F466})|[\x{1F468}\x{1F469}](?:\x{200D}\x{1F469}\x{200D}\x{1F467}\x{200D}\x{1F466})|[\x{1F468}\x{1F469}](?:\x{200D}\x{1F469}\x{200D}\x{1F467})|[\x{1F468}\x{1F469}](?:\x{200D}\x{1F469}\x{200D}\x{1F466})|[\x{1F469}](?:\x{200D
}\x{2764}\x{FE0F}\x{200D}\x{1F469})|[\x{1F469}\x{1F468}](?:\x{200D}\x{2764}\x{FE0F}\x{200D}\x{1F468})|[\x{1F469}](?:\x{200D}\x{2764}\x{FE0F}\x{200D}\x{1F48B}\x{200D}\x{1F469})|[\x{1F469}\x{1F468}](?:\x{200D}\x{2764}\x{FE0F}\x{200D}\x{1F48B}\x{200D}\x{1F468})|[\x{1F468}\x{1F469}](?:\x{1F3FF}\x{200D}\x{1F9B3})|[\x{1F468}\x{1F469}](?:\x{1F3FE}\x{200D}\x{1F9B3})|[\x{1F468}\x{1F469}](?:\x{1F3FD}\x{200D}\x{1F9B3})|[\x{1F468}\x{1F469}](?:\x{1F3FC}\x{200D}\x{1F9B3})|[\x{1F468}\x{1F469}](?:\x{1F3FB}\x{200D}\x{1F9B3})|[\x{1F468}\x{1F469}](?:\x{200D}\x{1F9B3})|[\x{1F468}\x{1F469}](?:\x{1F3FF}\x{200D}\x{1F9B2})|[\x{1F468}\x{1F469}](?:\x{1F3FE}\x{200D}\x{1F9B2})|[\x{1F468}\x{1F469}](?:\x{1F3FD}\x{200D}\x{1F9B2})|[\x{1F468}\x{1F469}](?:\x{1F3FC}\x{200D}\x{1F9B2})|[\x{1F468}\x{1F469}](?:\x{1F3FB}\x{200D}\x{1F9B2})|[\x{1F468}\x{1F469}](?:\x{200D}\x{1F9B2})|[\x{1F468}\x{1F469}](?:\x{1F3FF}\x{200D}\x{1F9B1})|[\x{1F468}\x{1F469}](?:\x{1F3FE}\x{200D}\x{1F9B1})|[\x{1F468}\x{1F469}](?:\x{1F3FD}\x{200D}\x{1F9B1})|[\x{1F468}\x{1F469}](?:\x{1F3FC}\x{200D}\x{1F9B1})|[\x{1F468}\x{1F469}](?:\x{1F3FB}\x{200D}\x{1F9B1})|[\x{1F468}\x{1F469}](?:\x{200D}\x{1F9B1})|[\x{1F468}\x{1F469}](?:\x{1F3FF}\x{200D}\x{1F9B0})|[\x{1F468}\x{1F469}](?:\x{1F3FE}\x{200D}\x{1F9B0})|[\x{1F468}\x{1F469}](?:\x{1F3FD}\x{200D}\x{1F9B0})|[\x{1F468}\x{1F469}](?:\x{1F3FC}\x{200D}\x{1F9B0})|[\x{1F468}\x{1F469}](?:\x{1F3FB}\x{200D}\x{1F9B0})|[\x{1F468}\x{1F469}](?:\x{200D}\x{1F9B0})|[\x{1F575}\x{1F3CC}\x{26F9}\x{1F3CB}](?:\x{FE0F}\x{200D}\x{2640}\x{FE0F})|[\x{1F575}\x{1F3CC}\x{26F9}\x{1F3CB}](?:\x{FE0F}\x{200D}\x{2642}\x{FE0F})|[\x{1F46E}\x{1F575}\x{1F482}\x{1F477}\x{1F473}\x{1F471}\x{1F9D9}\x{1F9DA}\x{1F9DB}\x{1F9DC}\x{1F9DD}\x{1F64D}\x{1F64E}\x{1F645}\x{1F646}\x{1F481}\x{1F64B}\x{1F647}\x{1F926}\x{1F937}\x{1F486}\x{1F487}\x{1F6B6}\x{1F3C3}\x{1F9D6}\x{1F9D7}\x{1F9D8}\x{1F3CC}\x{1F3C4}\x{1F6A3}\x{1F3CA}\x{26F9}\x{1F3CB}\x{1F6B4}\x{1F6B5}\x{1F938}\x{1F93D}\x{1F93E}\x{1F939}](?:\x{1F3FF}\x{200D}\x{2640}\x{FE0F})|[\x{1F46E}\x{1F
575}\x{1F482}\x{1F477}\x{1F473}\x{1F471}\x{1F9D9}\x{1F9DA}\x{1F9DB}\x{1F9DC}\x{1F9DD}\x{1F64D}\x{1F64E}\x{1F645}\x{1F646}\x{1F481}\x{1F64B}\x{1F647}\x{1F926}\x{1F937}\x{1F486}\x{1F487}\x{1F6B6}\x{1F3C3}\x{1F9D6}\x{1F9D7}\x{1F9D8}\x{1F3CC}\x{1F3C4}\x{1F6A3}\x{1F3CA}\x{26F9}\x{1F3CB}\x{1F6B4}\x{1F6B5}\x{1F938}\x{1F93D}\x{1F93E}\x{1F939}](?:\x{1F3FE}\x{200D}\x{2640}\x{FE0F})|[\x{1F46E}\x{1F575}\x{1F482}\x{1F477}\x{1F473}\x{1F471}\x{1F9D9}\x{1F9DA}\x{1F9DB}\x{1F9DC}\x{1F9DD}\x{1F64D}\x{1F64E}\x{1F645}\x{1F646}\x{1F481}\x{1F64B}\x{1F647}\x{1F926}\x{1F937}\x{1F486}\x{1F487}\x{1F6B6}\x{1F3C3}\x{1F9D6}\x{1F9D7}\x{1F9D8}\x{1F3CC}\x{1F3C4}\x{1F6A3}\x{1F3CA}\x{26F9}\x{1F3CB}\x{1F6B4}\x{1F6B5}\x{1F938}\x{1F93D}\x{1F93E}\x{1F939}](?:\x{1F3FD}\x{200D}\x{2640}\x{FE0F})|[\x{1F46E}\x{1F575}\x{1F482}\x{1F477}\x{1F473}\x{1F471}\x{1F9D9}\x{1F9DA}\x{1F9DB}\x{1F9DC}\x{1F9DD}\x{1F64D}\x{1F64E}\x{1F645}\x{1F646}\x{1F481}\x{1F64B}\x{1F647}\x{1F926}\x{1F937}\x{1F486}\x{1F487}\x{1F6B6}\x{1F3C3}\x{1F9D6}\x{1F9D7}\x{1F9D8}\x{1F3CC}\x{1F3C4}\x{1F6A3}\x{1F3CA}\x{26F9}\x{1F3CB}\x{1F6B4}\x{1F6B5}\x{1F938}\x{1F93D}\x{1F93E}\x{1F939}](?:\x{1F3FC}\x{200D}\x{2640}\x{FE0F})|[\x{1F46E}\x{1F575}\x{1F482}\x{1F477}\x{1F473}\x{1F471}\x{1F9D9}\x{1F9DA}\x{1F9DB}\x{1F9DC}\x{1F9DD}\x{1F64D}\x{1F64E}\x{1F645}\x{1F646}\x{1F481}\x{1F64B}\x{1F647}\x{1F926}\x{1F937}\x{1F486}\x{1F487}\x{1F6B6}\x{1F3C3}\x{1F9D6}\x{1F9D7}\x{1F9D8}\x{1F3CC}\x{1F3C4}\x{1F6A3}\x{1F3CA}\x{26F9}\x{1F3CB}\x{1F6B4}\x{1F6B5}\x{1F938}\x{1F93D}\x{1F93E}\x{1F939}](?:\x{1F3FB}\x{200D}\x{2640}\x{FE0F})|[\x{1F46E}\x{1F9B8}\x{1F9B9}\x{1F482}\x{1F477}\x{1F473}\x{1F471}\x{1F9D9}\x{1F9DA}\x{1F9DB}\x{1F9DC}\x{1F9DD}\x{1F9DE}\x{1F9DF}\x{1F64D}\x{1F64E}\x{1F645}\x{1F646}\x{1F481}\x{1F64B}\x{1F647}\x{1F926}\x{1F937}\x{1F486}\x{1F487}\x{1F6B6}\x{1F3C3}\x{1F46F}\x{1F9D6}\x{1F9D7}\x{1F9D8}\x{1F3C4}\x{1F6A3}\x{1F3CA}\x{1F6B4}\x{1F6B5}\x{1F938}\x{1F93C}\x{1F93D}\x{1F93E}\x{1F939}](?:\x{200D}\x{2640}\x{FE0F})|[\x{1F46E}\x{1F575}\x{1F482}\x{1F477}\x{1F473}\x{1F471}
\x{1F9D9}\x{1F9DA}\x{1F9DB}\x{1F9DC}\x{1F9DD}\x{1F64D}\x{1F64E}\x{1F645}\x{1F646}\x{1F481}\x{1F64B}\x{1F647}\x{1F926}\x{1F937}\x{1F486}\x{1F487}\x{1F6B6}\x{1F3C3}\x{1F9D6}\x{1F9D7}\x{1F9D8}\x{1F3CC}\x{1F3C4}\x{1F6A3}\x{1F3CA}\x{26F9}\x{1F3CB}\x{1F6B4}\x{1F6B5}\x{1F938}\x{1F93D}\x{1F93E}\x{1F939}](?:\x{1F3FF}\x{200D}\x{2642}\x{FE0F})|[\x{1F46E}\x{1F575}\x{1F482}\x{1F477}\x{1F473}\x{1F471}\x{1F9D9}\x{1F9DA}\x{1F9DB}\x{1F9DC}\x{1F9DD}\x{1F64D}\x{1F64E}\x{1F645}\x{1F646}\x{1F481}\x{1F64B}\x{1F647}\x{1F926}\x{1F937}\x{1F486}\x{1F487}\x{1F6B6}\x{1F3C3}\x{1F9D6}\x{1F9D7}\x{1F9D8}\x{1F3CC}\x{1F3C4}\x{1F6A3}\x{1F3CA}\x{26F9}\x{1F3CB}\x{1F6B4}\x{1F6B5}\x{1F938}\x{1F93D}\x{1F93E}\x{1F939}](?:\x{1F3FE}\x{200D}\x{2642}\x{FE0F})|[\x{1F46E}\x{1F575}\x{1F482}\x{1F477}\x{1F473}\x{1F471}\x{1F9D9}\x{1F9DA}\x{1F9DB}\x{1F9DC}\x{1F9DD}\x{1F64D}\x{1F64E}\x{1F645}\x{1F646}\x{1F481}\x{1F64B}\x{1F647}\x{1F926}\x{1F937}\x{1F486}\x{1F487}\x{1F6B6}\x{1F3C3}\x{1F9D6}\x{1F9D7}\x{1F9D8}\x{1F3CC}\x{1F3C4}\x{1F6A3}\x{1F3CA}\x{26F9}\x{1F3CB}\x{1F6B4}\x{1F6B5}\x{1F938}\x{1F93D}\x{1F93E}\x{1F939}](?:\x{1F3FD}\x{200D}\x{2642}\x{FE0F})|[\x{1F46E}\x{1F575}\x{1F482}\x{1F477}\x{1F473}\x{1F471}\x{1F9D9}\x{1F9DA}\x{1F9DB}\x{1F9DC}\x{1F9DD}\x{1F64D}\x{1F64E}\x{1F645}\x{1F646}\x{1F481}\x{1F64B}\x{1F647}\x{1F926}\x{1F937}\x{1F486}\x{1F487}\x{1F6B6}\x{1F3C3}\x{1F9D6}\x{1F9D7}\x{1F9D8}\x{1F3CC}\x{1F3C4}\x{1F6A3}\x{1F3CA}\x{26F9}\x{1F3CB}\x{1F6B4}\x{1F6B5}\x{1F938}\x{1F93D}\x{1F93E}\x{1F939}](?:\x{1F3FC}\x{200D}\x{2642}\x{FE0F})|[\x{1F46E}\x{1F575}\x{1F482}\x{1F477}\x{1F473}\x{1F471}\x{1F9D9}\x{1F9DA}\x{1F9DB}\x{1F9DC}\x{1F9DD}\x{1F64D}\x{1F64E}\x{1F645}\x{1F646}\x{1F481}\x{1F64B}\x{1F647}\x{1F926}\x{1F937}\x{1F486}\x{1F487}\x{1F6B6}\x{1F3C3}\x{1F9D6}\x{1F9D7}\x{1F9D8}\x{1F3CC}\x{1F3C4}\x{1F6A3}\x{1F3CA}\x{26F9}\x{1F3CB}\x{1F6B4}\x{1F6B5}\x{1F938}\x{1F93D}\x{1F93E}\x{1F939}](?:\x{1F3FB}\x{200D}\x{2642}\x{FE0F})|[\x{1F46E}\x{1F9B8}\x{1F9B9}\x{1F482}\x{1F477}\x{1F473}\x{1F471}\x{1F9D9}\x{1F9DA}\x{1F9DB}\x{1F9DC}\x{1F
9DD}\x{1F9DE}\x{1F9DF}\x{1F64D}\x{1F64E}\x{1F645}\x{1F646}\x{1F481}\x{1F64B}\x{1F647}\x{1F926}\x{1F937}\x{1F486}\x{1F487}\x{1F6B6}\x{1F3C3}\x{1F46F}\x{1F9D6}\x{1F9D7}\x{1F9D8}\x{1F3C4}\x{1F6A3}\x{1F3CA}\x{1F6B4}\x{1F6B5}\x{1F938}\x{1F93C}\x{1F93D}\x{1F93E}\x{1F939}](?:\x{200D}\x{2642}\x{FE0F})|[\x{1F468}\x{1F469}](?:\x{1F3FF}\x{200D}\x{1F692})|[\x{1F468}\x{1F469}](?:\x{1F3FE}\x{200D}\x{1F692})|[\x{1F468}\x{1F469}](?:\x{1F3FD}\x{200D}\x{1F692})|[\x{1F468}\x{1F469}](?:\x{1F3FC}\x{200D}\x{1F692})|[\x{1F468}\x{1F469}](?:\x{1F3FB}\x{200D}\x{1F692})|[\x{1F468}\x{1F469}](?:\x{200D}\x{1F692})|[\x{1F468}\x{1F469}](?:\x{1F3FF}\x{200D}\x{1F680})|[\x{1F468}\x{1F469}](?:\x{1F3FE}\x{200D}\x{1F680})|[\x{1F468}\x{1F469}](?:\x{1F3FD}\x{200D}\x{1F680})|[\x{1F468}\x{1F469}](?:\x{1F3FC}\x{200D}\x{1F680})|[\x{1F468}\x{1F469}](?:\x{1F3FB}\x{200D}\x{1F680})|[\x{1F468}\x{1F469}](?:\x{200D}\x{1F680})|[\x{1F468}\x{1F469}](?:\x{1F3FF}\x{200D}\x{2708}\x{FE0F})|[\x{1F468}\x{1F469}](?:\x{1F3FE}\x{200D}\x{2708}\x{FE0F})|[\x{1F468}\x{1F469}](?:\x{1F3FD}\x{200D}\x{2708}\x{FE0F})|[\x{1F468}\x{1F469}](?:\x{1F3FC}\x{200D}\x{2708}\x{FE0F})|[\x{1F468}\x{1F469}](?:\x{1F3FB}\x{200D}\x{2708}\x{FE0F})|[\x{1F468}\x{1F469}](?:\x{200D}\x{2708}\x{FE0F})|[\x{1F468}\x{1F469}](?:\x{1F3FF}\x{200D}\x{1F3A8})|[\x{1F468}\x{1F469}](?:\x{1F3FE}\x{200D}\x{1F3A8})|[\x{1F468}\x{1F469}](?:\x{1F3FD}\x{200D}\x{1F3A8})|[\x{1F468}\x{1F469}](?:\x{1F3FC}\x{200D}\x{1F3A8})|[\x{1F468}\x{1F469}](?:\x{1F3FB}\x{200D}\x{1F3A8})|[\x{1F468}\x{1F469}](?:\x{200D}\x{1F3A8})|[\x{1F468}\x{1F469}](?:\x{1F3FF}\x{200D}\x{1F3A4})|[\x{1F468}\x{1F469}](?:\x{1F3FE}\x{200D}\x{1F3A4})|[\x{1F468}\x{1F469}](?:\x{1F3FD}\x{200D}\x{1F3A4})|[\x{1F468}\x{1F469}](?:\x{1F3FC}\x{200D}\x{1F3A4})|[\x{1F468}\x{1F469}](?:\x{1F3FB}\x{200D}\x{1F3A4})|[\x{1F468}\x{1F469}](?:\x{200D}\x{1F3A4})|[\x{1F468}\x{1F469}](?:\x{1F3FF}\x{200D}\x{1F4BB})|[\x{1F468}\x{1F469}](?:\x{1F3FE}\x{200D}\x{1F4BB})|[\x{1F468}\x{1F469}](?:\x{1F3FD}\x{200D}\x{1F4BB})|[\x{1F468}\x{1F469}](?:\x
{1F3FC}\x{200D}\x{1F4BB})|[\x{1F468}\x{1F469}](?:\x{1F3FB}\x{200D}\x{1F4BB})|[\x{1F468}\x{1F469}](?:\x{200D}\x{1F4BB})|[\x{1F468}\x{1F469}](?:\x{1F3FF}\x{200D}\x{1F52C})|[\x{1F468}\x{1F469}](?:\x{1F3FE}\x{200D}\x{1F52C})|[\x{1F468}\x{1F469}](?:\x{1F3FD}\x{200D}\x{1F52C})|[\x{1F468}\x{1F469}](?:\x{1F3FC}\x{200D}\x{1F52C})|[\x{1F468}\x{1F469}](?:\x{1F3FB}\x{200D}\x{1F52C})|[\x{1F468}\x{1F469}](?:\x{200D}\x{1F52C})|[\x{1F468}\x{1F469}](?:\x{1F3FF}\x{200D}\x{1F4BC})|[\x{1F468}\x{1F469}](?:\x{1F3FE}\x{200D}\x{1F4BC})|[\x{1F468}\x{1F469}](?:\x{1F3FD}\x{200D}\x{1F4BC})|[\x{1F468}\x{1F469}](?:\x{1F3FC}\x{200D}\x{1F4BC})|[\x{1F468}\x{1F469}](?:\x{1F3FB}\x{200D}\x{1F4BC})|[\x{1F468}\x{1F469}](?:\x{200D}\x{1F4BC})|[\x{1F468}\x{1F469}](?:\x{1F3FF}\x{200D}\x{1F3ED})|[\x{1F468}\x{1F469}](?:\x{1F3FE}\x{200D}\x{1F3ED})|[\x{1F468}\x{1F469}](?:\x{1F3FD}\x{200D}\x{1F3ED})|[\x{1F468}\x{1F469}](?:\x{1F3FC}\x{200D}\x{1F3ED})|[\x{1F468}\x{1F469}](?:\x{1F3FB}\x{200D}\x{1F3ED})|[\x{1F468}\x{1F469}](?:\x{200D}\x{1F3ED})|[\x{1F468}\x{1F469}](?:\x{1F3FF}\x{200D}\x{1F527})|[\x{1F468}\x{1F469}](?:\x{1F3FE}\x{200D}\x{1F527})|[\x{1F468}\x{1F469}](?:\x{1F3FD}\x{200D}\x{1F527})|[\x{1F468}\x{1F469}](?:\x{1F3FC}\x{200D}\x{1F527})|[\x{1F468}\x{1F469}](?:\x{1F3FB}\x{200D}\x{1F527})|[\x{1F468}\x{1F469}](?:\x{200D}\x{1F527})|[\x{1F468}\x{1F469}](?:\x{1F3FF}\x{200D}\x{1F373})|[\x{1F468}\x{1F469}](?:\x{1F3FE}\x{200D}\x{1F373})|[\x{1F468}\x{1F469}](?:\x{1F3FD}\x{200D}\x{1F373})|[\x{1F468}\x{1F469}](?:\x{1F3FC}\x{200D}\x{1F373})|[\x{1F468}\x{1F469}](?:\x{1F3FB}\x{200D}\x{1F373})|[\x{1F468}\x{1F469}](?:\x{200D}\x{1F373})|[\x{1F468}\x{1F469}](?:\x{1F3FF}\x{200D}\x{1F33E})|[\x{1F468}\x{1F469}](?:\x{1F3FE}\x{200D}\x{1F33E})|[\x{1F468}\x{1F469}](?:\x{1F3FD}\x{200D}\x{1F33E})|[\x{1F468}\x{1F469}](?:\x{1F3FC}\x{200D}\x{1F33E})|[\x{1F468}\x{1F469}](?:\x{1F3FB}\x{200D}\x{1F33E})|[\x{1F468}\x{1F469}](?:\x{200D}\x{1F33E})|[\x{1F468}\x{1F469}](?:\x{1F3FF}\x{200D}\x{2696}\x{FE0F})|[\x{1F468}\x{1F469}](?:\x{1F3FE}\x{200D}\
x{2696}\x{FE0F})|[\x{1F468}\x{1F469}](?:\x{1F3FD}\x{200D}\x{2696}\x{FE0F})|[\x{1F468}\x{1F469}](?:\x{1F3FC}\x{200D}\x{2696}\x{FE0F})|[\x{1F468}\x{1F469}](?:\x{1F3FB}\x{200D}\x{2696}\x{FE0F})|[\x{1F468}\x{1F469}](?:\x{200D}\x{2696}\x{FE0F})|[\x{1F468}\x{1F469}](?:\x{1F3FF}\x{200D}\x{1F3EB})|[\x{1F468}\x{1F469}](?:\x{1F3FE}\x{200D}\x{1F3EB})|[\x{1F468}\x{1F469}](?:\x{1F3FD}\x{200D}\x{1F3EB})|[\x{1F468}\x{1F469}](?:\x{1F3FC}\x{200D}\x{1F3EB})|[\x{1F468}\x{1F469}](?:\x{1F3FB}\x{200D}\x{1F3EB})|[\x{1F468}\x{1F469}](?:\x{200D}\x{1F3EB})|[\x{1F468}\x{1F469}](?:\x{1F3FF}\x{200D}\x{1F393})|[\x{1F468}\x{1F469}](?:\x{1F3FE}\x{200D}\x{1F393})|[\x{1F468}\x{1F469}](?:\x{1F3FD}\x{200D}\x{1F393})|[\x{1F468}\x{1F469}](?:\x{1F3FC}\x{200D}\x{1F393})|[\x{1F468}\x{1F469}](?:\x{1F3FB}\x{200D}\x{1F393})|[\x{1F468}\x{1F469}](?:\x{200D}\x{1F393})|[\x{1F468}\x{1F469}](?:\x{1F3FF}\x{200D}\x{2695}\x{FE0F})|[\x{1F468}\x{1F469}](?:\x{1F3FE}\x{200D}\x{2695}\x{FE0F})|[\x{1F468}\x{1F469}](?:\x{1F3FD}\x{200D}\x{2695}\x{FE0F})|[\x{1F468}\x{1F469}](?:\x{1F3FC}\x{200D}\x{2695}\x{FE0F})|[\x{1F468}\x{1F469}](?:\x{1F3FB}\x{200D}\x{2695}\x{FE0F})|[\x{1F468}\x{1F469}](?:\x{200D}\x{2695}\x{FE0F})|[\x{1F476}\x{1F9D2}\x{1F466}\x{1F467}\x{1F9D1}\x{1F468}\x{1F469}\x{1F9D3}\x{1F474}\x{1F475}\x{1F46E}\x{1F575}\x{1F482}\x{1F477}\x{1F934}\x{1F478}\x{1F473}\x{1F472}\x{1F9D5}\x{1F9D4}\x{1F471}\x{1F935}\x{1F470}\x{1F930}\x{1F931}\x{1F47C}\x{1F385}\x{1F936}\x{1F9D9}\x{1F9DA}\x{1F9DB}\x{1F9DC}\x{1F9DD}\x{1F64D}\x{1F64E}\x{1F645}\x{1F646}\x{1F481}\x{1F64B}\x{1F647}\x{1F926}\x{1F937}\x{1F486}\x{1F487}\x{1F6B6}\x{1F3C3}\x{1F483}\x{1F57A}\x{1F9D6}\x{1F9D7}\x{1F9D8}\x{1F6C0}\x{1F6CC}\x{1F574}\x{1F3C7}\x{1F3C2}\x{1F3CC}\x{1F3C4}\x{1F6A3}\x{1F3CA}\x{26F9}\x{1F3CB}\x{1F6B4}\x{1F6B5}\x{1F938}\x{1F93D}\x{1F93E}\x{1F939}\x{1F933}\x{1F4AA}\x{1F9B5}\x{1F9B6}\x{1F448}\x{1F449}\x{261D}\x{1F446}\x{1F595}\x{1F447}\x{270C}\x{1F91E}\x{1F596}\x{1F918}\x{1F919}\x{1F590}\x{270B}\x{1F44C}\x{1F44D}\x{1F44E}\x{270A}\x{1F44A}\x{1F91B}\x{1F91C}\x{
1F91A}\x{1F44B}\x{1F91F}\x{270D}\x{1F44F}\x{1F450}\x{1F64C}\x{1F932}\x{1F64F}\x{1F485}\x{1F442}\x{1F443}](?:\x{1F3FF})|[\x{1F476}\x{1F9D2}\x{1F466}\x{1F467}\x{1F9D1}\x{1F468}\x{1F469}\x{1F9D3}\x{1F474}\x{1F475}\x{1F46E}\x{1F575}\x{1F482}\x{1F477}\x{1F934}\x{1F478}\x{1F473}\x{1F472}\x{1F9D5}\x{1F9D4}\x{1F471}\x{1F935}\x{1F470}\x{1F930}\x{1F931}\x{1F47C}\x{1F385}\x{1F936}\x{1F9D9}\x{1F9DA}\x{1F9DB}\x{1F9DC}\x{1F9DD}\x{1F64D}\x{1F64E}\x{1F645}\x{1F646}\x{1F481}\x{1F64B}\x{1F647}\x{1F926}\x{1F937}\x{1F486}\x{1F487}\x{1F6B6}\x{1F3C3}\x{1F483}\x{1F57A}\x{1F9D6}\x{1F9D7}\x{1F9D8}\x{1F6C0}\x{1F6CC}\x{1F574}\x{1F3C7}\x{1F3C2}\x{1F3CC}\x{1F3C4}\x{1F6A3}\x{1F3CA}\x{26F9}\x{1F3CB}\x{1F6B4}\x{1F6B5}\x{1F938}\x{1F93D}\x{1F93E}\x{1F939}\x{1F933}\x{1F4AA}\x{1F9B5}\x{1F9B6}\x{1F448}\x{1F449}\x{261D}\x{1F446}\x{1F595}\x{1F447}\x{270C}\x{1F91E}\x{1F596}\x{1F918}\x{1F919}\x{1F590}\x{270B}\x{1F44C}\x{1F44D}\x{1F44E}\x{270A}\x{1F44A}\x{1F91B}\x{1F91C}\x{1F91A}\x{1F44B}\x{1F91F}\x{270D}\x{1F44F}\x{1F450}\x{1F64C}\x{1F932}\x{1F64F}\x{1F485}\x{1F442}\x{1F443}](?:\x{1F3FE})|[\x{1F476}\x{1F9D2}\x{1F466}\x{1F467}\x{1F9D1}\x{1F468}\x{1F469}\x{1F9D3}\x{1F474}\x{1F475}\x{1F46E}\x{1F575}\x{1F482}\x{1F477}\x{1F934}\x{1F478}\x{1F473}\x{1F472}\x{1F9D5}\x{1F9D4}\x{1F471}\x{1F935}\x{1F470}\x{1F930}\x{1F931}\x{1F47C}\x{1F385}\x{1F936}\x{1F9D9}\x{1F9DA}\x{1F9DB}\x{1F9DC}\x{1F9DD}\x{1F64D}\x{1F64E}\x{1F645}\x{1F646}\x{1F481}\x{1F64B}\x{1F647}\x{1F926}\x{1F937}\x{1F486}\x{1F487}\x{1F6B6}\x{1F3C3}\x{1F483}\x{1F57A}\x{1F9D6}\x{1F9D7}\x{1F9D8}\x{1F6C0}\x{1F6CC}\x{1F574}\x{1F3C7}\x{1F3C2}\x{1F3CC}\x{1F3C4}\x{1F6A3}\x{1F3CA}\x{26F9}\x{1F3CB}\x{1F6B4}\x{1F6B5}\x{1F938}\x{1F93D}\x{1F93E}\x{1F939}\x{1F933}\x{1F4AA}\x{1F9B5}\x{1F9B6}\x{1F448}\x{1F449}\x{261D}\x{1F446}\x{1F595}\x{1F447}\x{270C}\x{1F91E}\x{1F596}\x{1F918}\x{1F919}\x{1F590}\x{270B}\x{1F44C}\x{1F44D}\x{1F44E}\x{270A}\x{1F44A}\x{1F91B}\x{1F91C}\x{1F91A}\x{1F44B}\x{1F91F}\x{270D}\x{1F44F}\x{1F450}\x{1F64C}\x{1F932}\x{1F64F}\x{1F485}\x{1F442}\x{1F443}](?:
\x{1F3FD})|[\x{1F476}\x{1F9D2}\x{1F466}\x{1F467}\x{1F9D1}\x{1F468}\x{1F469}\x{1F9D3}\x{1F474}\x{1F475}\x{1F46E}\x{1F575}\x{1F482}\x{1F477}\x{1F934}\x{1F478}\x{1F473}\x{1F472}\x{1F9D5}\x{1F9D4}\x{1F471}\x{1F935}\x{1F470}\x{1F930}\x{1F931}\x{1F47C}\x{1F385}\x{1F936}\x{1F9D9}\x{1F9DA}\x{1F9DB}\x{1F9DC}\x{1F9DD}\x{1F64D}\x{1F64E}\x{1F645}\x{1F646}\x{1F481}\x{1F64B}\x{1F647}\x{1F926}\x{1F937}\x{1F486}\x{1F487}\x{1F6B6}\x{1F3C3}\x{1F483}\x{1F57A}\x{1F9D6}\x{1F9D7}\x{1F9D8}\x{1F6C0}\x{1F6CC}\x{1F574}\x{1F3C7}\x{1F3C2}\x{1F3CC}\x{1F3C4}\x{1F6A3}\x{1F3CA}\x{26F9}\x{1F3CB}\x{1F6B4}\x{1F6B5}\x{1F938}\x{1F93D}\x{1F93E}\x{1F939}\x{1F933}\x{1F4AA}\x{1F9B5}\x{1F9B6}\x{1F448}\x{1F449}\x{261D}\x{1F446}\x{1F595}\x{1F447}\x{270C}\x{1F91E}\x{1F596}\x{1F918}\x{1F919}\x{1F590}\x{270B}\x{1F44C}\x{1F44D}\x{1F44E}\x{270A}\x{1F44A}\x{1F91B}\x{1F91C}\x{1F91A}\x{1F44B}\x{1F91F}\x{270D}\x{1F44F}\x{1F450}\x{1F64C}\x{1F932}\x{1F64F}\x{1F485}\x{1F442}\x{1F443}](?:\x{1F3FC})|[\x{1F476}\x{1F9D2}\x{1F466}\x{1F467}\x{1F9D1}\x{1F468}\x{1F469}\x{1F9D3}\x{1F474}\x{1F475}\x{1F46E}\x{1F575}\x{1F482}\x{1F477}\x{1F934}\x{1F478}\x{1F473}\x{1F472}\x{1F9D5}\x{1F9D4}\x{1F471}\x{1F935}\x{1F470}\x{1F930}\x{1F931}\x{1F47C}\x{1F385}\x{1F936}\x{1F9D9}\x{1F9DA}\x{1F9DB}\x{1F9DC}\x{1F9DD}\x{1F64D}\x{1F64E}\x{1F645}\x{1F646}\x{1F481}\x{1F64B}\x{1F647}\x{1F926}\x{1F937}\x{1F486}\x{1F487}\x{1F6B6}\x{1F3C3}\x{1F483}\x{1F57A}\x{1F9D6}\x{1F9D7}\x{1F9D8}\x{1F6C0}\x{1F6CC}\x{1F574}\x{1F3C7}\x{1F3C2}\x{1F3CC}\x{1F3C4}\x{1F6A3}\x{1F3CA}\x{26F9}\x{1F3CB}\x{1F6B4}\x{1F6B5}\x{1F938}\x{1F93D}\x{1F93E}\x{1F939}\x{1F933}\x{1F4AA}\x{1F9B5}\x{1F9B6}\x{1F448}\x{1F449}\x{261D}\x{1F446}\x{1F595}\x{1F447}\x{270C}\x{1F91E}\x{1F596}\x{1F918}\x{1F919}\x{1F590}\x{270B}\x{1F44C}\x{1F44D}\x{1F44E}\x{270A}\x{1F44A}\x{1F91B}\x{1F91C}\x{1F91A}\x{1F44B}\x{1F91F}\x{270D}\x{1F44F}\x{1F450}\x{1F64C}\x{1F932}\x{1F64F}\x{1F485}\x{1F442}\x{1F443}](?:\x{1F3FB})|[\x{1F1E6}\x{1F1E7}\x{1F1E8}\x{1F1E9}\x{1F1F0}\x{1F1F2}\x{1F1F3}\x{1F1F8}\x{1F1F9}\x{1F1FA}](?:\x
{1F1FF})|[\x{1F1E7}\x{1F1E8}\x{1F1EC}\x{1F1F0}\x{1F1F1}\x{1F1F2}\x{1F1F5}\x{1F1F8}\x{1F1FA}](?:\x{1F1FE})|[\x{1F1E6}\x{1F1E8}\x{1F1F2}\x{1F1F8}](?:\x{1F1FD})|[\x{1F1E6}\x{1F1E7}\x{1F1E8}\x{1F1EC}\x{1F1F0}\x{1F1F2}\x{1F1F5}\x{1F1F7}\x{1F1F9}\x{1F1FF}](?:\x{1F1FC})|[\x{1F1E7}\x{1F1E8}\x{1F1F1}\x{1F1F2}\x{1F1F8}\x{1F1F9}](?:\x{1F1FB})|[\x{1F1E6}\x{1F1E8}\x{1F1EA}\x{1F1EC}\x{1F1ED}\x{1F1F1}\x{1F1F2}\x{1F1F3}\x{1F1F7}\x{1F1FB}](?:\x{1F1FA})|[\x{1F1E6}\x{1F1E7}\x{1F1EA}\x{1F1EC}\x{1F1ED}\x{1F1EE}\x{1F1F1}\x{1F1F2}\x{1F1F5}\x{1F1F8}\x{1F1F9}\x{1F1FE}](?:\x{1F1F9})|[\x{1F1E6}\x{1F1E7}\x{1F1EA}\x{1F1EC}\x{1F1EE}\x{1F1F1}\x{1F1F2}\x{1F1F5}\x{1F1F7}\x{1F1F8}\x{1F1FA}\x{1F1FC}](?:\x{1F1F8})|[\x{1F1E6}\x{1F1E7}\x{1F1E8}\x{1F1EA}\x{1F1EB}\x{1F1EC}\x{1F1ED}\x{1F1EE}\x{1F1F0}\x{1F1F1}\x{1F1F2}\x{1F1F3}\x{1F1F5}\x{1F1F8}\x{1F1F9}](?:\x{1F1F7})|[\x{1F1E6}\x{1F1E7}\x{1F1EC}\x{1F1EE}\x{1F1F2}](?:\x{1F1F6})|[\x{1F1E8}\x{1F1EC}\x{1F1EF}\x{1F1F0}\x{1F1F2}\x{1F1F3}](?:\x{1F1F5})|[\x{1F1E6}\x{1F1E7}\x{1F1E8}\x{1F1E9}\x{1F1EB}\x{1F1EE}\x{1F1EF}\x{1F1F2}\x{1F1F3}\x{1F1F7}\x{1F1F8}\x{1F1F9}](?:\x{1F1F4})|[\x{1F1E7}\x{1F1E8}\x{1F1EC}\x{1F1ED}\x{1F1EE}\x{1F1F0}\x{1F1F2}\x{1F1F5}\x{1F1F8}\x{1F1F9}\x{1F1FA}\x{1F1FB}](?:\x{1F1F3})|[\x{1F1E6}\x{1F1E7}\x{1F1E8}\x{1F1E9}\x{1F1EB}\x{1F1EC}\x{1F1ED}\x{1F1EE}\x{1F1EF}\x{1F1F0}\x{1F1F2}\x{1F1F4}\x{1F1F5}\x{1F1F8}\x{1F1F9}\x{1F1FA}\x{1F1FF}](?:\x{1F1F2})|[\x{1F1E6}\x{1F1E7}\x{1F1E8}\x{1F1EC}\x{1F1EE}\x{1F1F2}\x{1F1F3}\x{1F1F5}\x{1F1F8}\x{1F1F9}](?:\x{1F1F1})|[\x{1F1E8}\x{1F1E9}\x{1F1EB}\x{1F1ED}\x{1F1F1}\x{1F1F2}\x{1F1F5}\x{1F1F8}\x{1F1F9}\x{1F1FD}](?:\x{1F1F0})|[\x{1F1E7}\x{1F1E9}\x{1F1EB}\x{1F1F8}\x{1F1F9}](?:\x{1F1EF})|[\x{1F1E6}\x{1F1E7}\x{1F1E8}\x{1F1EB}\x{1F1EC}\x{1F1F0}\x{1F1F1}\x{1F1F3}\x{1F1F8}\x{1F1FB}](?:\x{1F1EE})|[\x{1F1E7}\x{1F1E8}\x{1F1EA}\x{1F1EC}\x{1F1F0}\x{1F1F2}\x{1F1F5}\x{1F1F8}\x{1F1F9}](?:\x{1F1ED})|[\x{1F1E6}\x{1F1E7}\x{1F1E8}\x{1F1E9}\x{1F1EA}\x{1F1EC}\x{1F1F0}\x{1F1F2}\x{1F1F3}\x{1F1F5}\x{1F1F8}\x{1F1F9}\x{1F1FA}\x{1F1FB}](?:\x{1F1
EC})|[\x{1F1E6}\x{1F1E7}\x{1F1E8}\x{1F1EC}\x{1F1F2}\x{1F1F3}\x{1F1F5}\x{1F1F9}\x{1F1FC}](?:\x{1F1EB})|[\x{1F1E6}\x{1F1E7}\x{1F1E9}\x{1F1EA}\x{1F1EC}\x{1F1EE}\x{1F1EF}\x{1F1F0}\x{1F1F2}\x{1F1F3}\x{1F1F5}\x{1F1F7}\x{1F1F8}\x{1F1FB}\x{1F1FE}](?:\x{1F1EA})|[\x{1F1E6}\x{1F1E7}\x{1F1E8}\x{1F1EC}\x{1F1EE}\x{1F1F2}\x{1F1F8}\x{1F1F9}](?:\x{1F1E9})|[\x{1F1E6}\x{1F1E8}\x{1F1EA}\x{1F1EE}\x{1F1F1}\x{1F1F2}\x{1F1F3}\x{1F1F8}\x{1F1F9}\x{1F1FB}](?:\x{1F1E8})|[\x{1F1E7}\x{1F1EC}\x{1F1F1}\x{1F1F8}](?:\x{1F1E7})|[\x{1F1E7}\x{1F1E8}\x{1F1EA}\x{1F1EC}\x{1F1F1}\x{1F1F2}\x{1F1F3}\x{1F1F5}\x{1F1F6}\x{1F1F8}\x{1F1F9}\x{1F1FA}\x{1F1FB}\x{1F1FF}](?:\x{1F1E6})|[\x{00A9}\x{00AE}\x{203C}\x{2049}\x{2122}\x{2139}\x{2194}-\x{2199}\x{21A9}-\x{21AA}\x{231A}-\x{231B}\x{2328}\x{23CF}\x{23E9}-\x{23F3}\x{23F8}-\x{23FA}\x{24C2}\x{25AA}-\x{25AB}\x{25B6}\x{25C0}\x{25FB}-\x{25FE}\x{2600}-\x{2604}\x{260E}\x{2611}\x{2614}-\x{2615}\x{2618}\x{261D}\x{2620}\x{2622}-\x{2623}\x{2626}\x{262A}\x{262E}-\x{262F}\x{2638}-\x{263A}\x{2640}\x{2642}\x{2648}-\x{2653}\x{2660}\x{2663}\x{2665}-\x{2666}\x{2668}\x{267B}\x{267E}-\x{267F}\x{2692}-\x{2697}\x{2699}\x{269B}-\x{269C}\x{26A0}-\x{26A1}\x{26AA}-\x{26AB}\x{26B0}-\x{26B1}\x{26BD}-\x{26BE}\x{26C4}-\x{26C5}\x{26C8}\x{26CE}-\x{26CF}\x{26D1}\x{26D3}-\x{26D4}\x{26E9}-\x{26EA}\x{26F0}-\x{26F5}\x{26F7}-\x{26FA}\x{26FD}\x{2702}\x{2705}\x{2708}-\x{270D}\x{270F}\x{2712}\x{2714}\x{2716}\x{271D}\x{2721}\x{2728}\x{2733}-\x{2734}\x{2744}\x{2747}\x{274C}\x{274E}\x{2753}-\x{2755}\x{2757}\x{2763}-\x{2764}\x{2795}-\x{2797}\x{27A1}\x{27B0}\x{27BF}\x{2934}-\x{2935}\x{2B05}-\x{2B07}\x{2B1B}-\x{2B1C}\x{2B50}\x{2B55}\x{3030}\x{303D}\x{3297}\x{3299}\x{1F004}\x{1F0CF}\x{1F170}-\x{1F171}\x{1F17E}-\x{1F17F}\x{1F18E}\x{1F191}-\x{1F19A}\x{1F201}-\x{1F202}\x{1F21A}\x{1F22F}\x{1F232}-\x{1F23A}\x{1F250}-\x{1F251}\x{1F300}-\x{1F321}\x{1F324}-\x{1F393}\x{1F396}-\x{1F397}\x{1F399}-\x{1F39B}\x{1F39E}-\x{1F3F0}\x{1F3F3}-\x{1F3F5}\x{1F3F7}-\x{1F3FA}\x{1F400}-\x{1F4FD}\x{1F4FF}-\x{1F53D}\x{1F549}-\x{1F54E}\x{1F5
50}-\x{1F567}\x{1F56F}-\x{1F570}\x{1F573}-\x{1F57A}\x{1F587}\x{1F58A}-\x{1F58D}\x{1F590}\x{1F595}-\x{1F596}\x{1F5A4}-\x{1F5A5}\x{1F5A8}\x{1F5B1}-\x{1F5B2}\x{1F5BC}\x{1F5C2}-\x{1F5C4}\x{1F5D1}-\x{1F5D3}\x{1F5DC}-\x{1F5DE}\x{1F5E1}\x{1F5E3}\x{1F5E8}\x{1F5EF}\x{1F5F3}\x{1F5FA}-\x{1F64F}\x{1F680}-\x{1F6C5}\x{1F6CB}-\x{1F6D2}\x{1F6E0}-\x{1F6E5}\x{1F6E9}\x{1F6EB}-\x{1F6EC}\x{1F6F0}\x{1F6F3}-\x{1F6F9}\x{1F910}-\x{1F93A}\x{1F93C}-\x{1F93E}\x{1F940}-\x{1F945}\x{1F947}-\x{1F970}\x{1F973}-\x{1F976}\x{1F97A}\x{1F97C}-\x{1F9A2}\x{1F9B0}-\x{1F9B9}\x{1F9C0}-\x{1F9C2}\x{1F9D0}-\x{1F9FF}]`)
	// rxExtraSpace matches runs of two or more whitespace characters.
	rxExtraSpace = regexp.MustCompile("\\s{2,}")
)

// IsEmpty reports whether s has zero length. Note that a string of only
// whitespace is NOT empty; see IsBlank for that check.
func IsEmpty(s string) bool {
	return len(s) == 0
}

// IsBlank reports whether s is empty or consists entirely of whitespace.
func IsBlank(s string) bool {
	return s == "" || strings.TrimSpace(s) == ""
}

// ToNumber converts a string into a numeric value derived from its runes.
// NOTE(review): despite the linked question's "unique int" goal, distinct
// strings can collide — treat the result as a hash, not a unique identifier.
// https://stackoverflow.com/questions/5459436/how-can-i-generate-a-unique-int-from-a-unique-string
func ToNumber(s string) int {
	number := 0
	runes := []rune(s)
	for i, r := range runes {
		x := 0
		if i != 0 {
			x = int(runes[i-1]) // mix the previous rune into this position
		}
		number += ((x << 16) | (x >> 16)) ^ int(r)
	}
	return number
}

// ContainsChinese reports whether str contains at least one Han character.
func ContainsChinese(str string) bool {
	if str == "" {
		return false
	}

	for _, v := range str {
		if unicode.Is(unicode.Han, v) {
			return true
		}
	}
	return false
}

// ToNarrow converts full-width characters in str to their half-width forms.
func ToNarrow(str string) string {
	return width.Narrow.String(str)
}

// ToWiden converts half-width characters in str to their full-width forms.
func ToWiden(str string) string {
	return width.Widen.String(str)
}

// Split splits str by each separator in turn. With no separators the whole
// string is returned as a single element. With multiple separators, parts
// produced by earlier separators are re-split by later ones; only pieces
// produced by the final separator (trimmed, non-empty) end up in the result.
// NOTE(review): with exactly one separator the pieces are returned untrimmed,
// unlike the multi-separator path — confirm that asymmetry is intended.
func Split(str string, separators ...string) []string {
	if len(separators) == 0 {
		return []string{str}
	}

	texts := make([]string, 0)
	if str != "" {
		n := len(separators)
		parts := strings.Split(str, separators[0])
		if n == 1 {
			texts = parts
		} else {
			// n becomes the index of the last separator in separators[1:].
			n -= 2
			for i, sep := range separators[1:] {
				// parts doubles as a work queue: range sees only the m
				// elements present at loop entry (range captures the length
				// once), while intermediate pieces are appended past m.
				m := len(parts)
				for _, part := range parts {
					for _, s := range strings.Split(part, sep) {
						s = strings.TrimSpace(s)
						if s == "" {
							continue
						}
						if n == i {
							// Last separator: emit the final piece.
							texts = append(texts, s)
						} else {
							// Queue the piece for the next separator pass.
							parts = append(parts, s)
						}
					}
				}
				// Keep only the newly queued pieces for the next pass.
				parts = append(parts[:0], parts[m:]...)
			}
		}
	}
	return texts
}

// String renders an arbitrary value as a string: []byte/string directly,
// scalars via strconv, and pointers/structs/maps/slices/arrays as JSON
// (empty string when marshalling fails). Anything else falls back to fmt.
func String(value interface{}) string {
	switch val := value.(type) {
	case []byte:
		return string(val)
	case string:
		return val
	}
	v := reflect.ValueOf(value)
	switch v.Kind() {
	case reflect.Invalid:
		return ""
	case reflect.Bool:
		return strconv.FormatBool(v.Bool())
	case reflect.String:
		return v.String()
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		return strconv.FormatInt(v.Int(), 10)
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
		return strconv.FormatUint(v.Uint(), 10)
	case reflect.Float64:
		return strconv.FormatFloat(v.Float(), 'f', -1, 64)
	case reflect.Float32:
		return strconv.FormatFloat(v.Float(), 'f', -1, 32)
	case reflect.Ptr, reflect.Struct, reflect.Map, reflect.Slice, reflect.Array:
		if b, err := json.Marshal(v.Interface()); err == nil {
			return string(b)
		} else {
			return ""
		}
	default:
		return fmt.Sprintf("%v", value)
	}
}

// RemoveEmoji strips all emoji sequences (per rxEmoji) from str,
// optionally trimming surrounding whitespace first.
func RemoveEmoji(str string, trim bool) string {
	if trim {
		str = strings.TrimSpace(str)
	}
	if str != "" {
		str = rxEmoji.ReplaceAllString(str, "")
	}
	return str
}

// TrimAny repeatedly removes each of the given sets from the head and tail
// of s (case-insensitively, via StartsWith/EndsWith). A set consisting only
// of whitespace enables whitespace trimming between removals instead of
// being matched literally.
func TrimAny(s string, sets ...string) string {
	if s == "" || len(sets) == 0 {
		return s
	}

	trimSpace := false
	fixedSets := make([]string, 0)
	for _, set := range sets {
		n := 0
		for _, r := range set {
			if unicode.IsSpace(r) {
				n++
			}
		}
		if n == len(set) {
			// A purely-whitespace set means "also trim whitespace".
			trimSpace = true
		} else {
			fixedSets = append(fixedSets, set) // excludes whitespace-only sets
		}
	}

	if trimSpace {
		s = strings.TrimSpace(s)
		if s == "" {
			return s
		}
	}

	if len(fixedSets) == 0 {
		return s
	}

	// Sort descending by length so longer sets are tried first.
	sort.Slice(fixedSets, func(i, j int) bool {
		return len(fixedSets[i]) > len(fixedSets[j])
	})

	minL := len(fixedSets[0])
	for i := range fixedSets {
		n := len(fixedSets[i])
		if n < minL {
			minL = n
		}
	}

	// index: position of the first set whose length equals the minimum
	// (after the descending sort, the start of the shortest group).
	index := -1
	for i, set := range fixedSets {
		if len(set) == minL {
			index = i
			break
		}
	}

	// Two passes: the sets longer than the minimum (in descending order),
	// then all sets in reversed (ascending) order.
	// NOTE(review): fixedSets[0:index] deliberately excludes the shortest
	// group from the first pass — verify this ordering is the intent.
	values := [][]string{fixedSets[0:index], slicex.StringSliceReverse(fixedSets)}
	for _, value := range values {
		for {
			// Keep stripping until a full sweep over the sets removes nothing.
			hitCounts := 0
			for _, set := range value {
				setLen := len(set)
				ss := []string{set}
				start := StartsWith(s, ss, false)
				if start {
					s = s[setLen:]
					if trimSpace {
						s = strings.TrimSpace(s)
					}
				}
				end := EndsWith(s, ss, false)
				if end {
					if len(s) > setLen {
						s = s[0 : len(s)-setLen]
					}
					if trimSpace {
						s = strings.TrimSpace(s)
					}
				}
				if s == "" {
					hitCounts = 0
					break
				} else if start || end {
					hitCounts++
				}
			}
			if hitCounts == 0 {
				break
			}
		}
	}

	return s
}

// RemoveExtraSpace trims s and collapses any run of whitespace to a single
// space.
// NOTE(review): the strings.Replace call appears to replace a space with a
// space here; the original was presumably the full-width space (U+3000) and
// may have been lost in transcription — verify against the repository.
func RemoveExtraSpace(s string) string {
	if s != "" {
		s = strings.TrimSpace(s)
	}
	if s == "" {
		return s
	}
	return rxExtraSpace.ReplaceAllLiteralString(strings.Replace(s, " ", " ", -1), " ")
}

// ToBytes returns the bytes of s without copying.
// The result aliases the string's storage and must never be mutated.
// NOTE(review): reflect.StringHeader/SliceHeader are deprecated since
// Go 1.20; unsafe.Slice(unsafe.StringData(s), len(s)) is the supported form.
func ToBytes(s string) []byte {
	sh := (*reflect.StringHeader)(unsafe.Pointer(&s))
	bh := reflect.SliceHeader{
		Data: sh.Data,
		Len:  sh.Len,
		Cap:  sh.Len,
	}
	return *(*[]byte)(unsafe.Pointer(&bh))
}

// WordMatched reports whether s contains any of the given words.
// Latin words must match as whole words (whitespace-delimited); words that
// contain Chinese characters fall back to a plain substring search.
func WordMatched(s string, words []string, caseSensitive bool) bool {
	if s == "" || len(words) == 0 {
		return false
	}

	// Build a word-boundary alternation regex from the words, escaping
	// regex metacharacters by hand via the replacer below.
	var b strings.Builder
	if !caseSensitive {
		b.WriteString("(?i)")
	}
	b.WriteString(`(^|([\s\t\n]+))(`)
	replacer := strings.NewReplacer(
		"\\\\", "\\\\\\",
		"(", "\\(",
		"|", "\\|",
		")", "\\)",
		"^", "\\^",
		"$", "\\$",
		".", "\\.",
		"[", "\\[",
		"*", "\\*",
		"+", "\\+",
		"?", "\\?",
		"{", "\\{",
		"-", "\\-",
		",", "\\,",
	)
	for i, word :=
range words {
		n := 0
		word = strings.TrimSpace(word)
		for _, w := range word {
			if !syntax.IsWordChar(w) {
				n++
			}
		}
		// A word made entirely of non-word characters can never match as a
		// whole word; bail out early.
		if n == len(word) {
			return false
		}

		if i != 0 {
			b.WriteString("|")
		}
		b.WriteString(replacer.Replace(word))
	}
	b.WriteString(`)($|([\s\t\n]+))`)
	if re, err := regexp.Compile(b.String()); err == nil {
		if re.MatchString(s) {
			return true
		}
	}

	// When the regex does not match (or fails to compile), Chinese words are
	// checked with a plain substring search instead.
	if !caseSensitive {
		s = strings.ToLower(s)
	}
	for _, word := range words {
		if ContainsChinese(word) {
			if !caseSensitive {
				word = strings.ToLower(word)
			}
			if strings.Index(s, word) != -1 {
				return true
			}
		}
	}
	return false
}

// StartsWith reports whether s starts with any of the prefixes in ss.
// A nil ss, an empty s, or an empty prefix all yield true.
// NOTE(review): the case-insensitive branch slices s by bytes (s[0:m]),
// which can split a multi-byte rune before EqualFold compares — confirm
// callers only pass ASCII prefixes, or the comparison may misbehave.
func StartsWith(s string, ss []string, caseSensitive bool) bool {
	n := len(s)
	if ss == nil || n == 0 {
		return true
	}

	has := false
	for _, prefix := range ss {
		m := len(prefix)
		if m == 0 {
			has = true
		} else {
			if m <= n {
				if caseSensitive {
					has = strings.HasPrefix(s, prefix)
				} else {
					ns := s
					if n-m > 0 {
						ns = s[0:m]
					}
					has = strings.EqualFold(ns, prefix)
				}
			}
		}
		if has {
			break
		}
	}
	return has
}

// EndsWith reports whether s ends with any of the suffixes in ss.
// A nil ss, an empty s, or an empty suffix all yield true.
// NOTE(review): same byte-slicing caveat as StartsWith for the
// case-insensitive path (s[n-m:] can split a multi-byte rune).
func EndsWith(s string, ss []string, caseSensitive bool) bool {
	n := len(s)
	if ss == nil || n == 0 {
		return true
	}

	has := false
	for _, suffix := range ss {
		m := len(suffix)
		if m == 0 {
			has = true
		} else {
			if m <= n {
				if caseSensitive {
					has = strings.HasSuffix(s, suffix)
				} else {
					ns := s
					if n-m > 0 {
						ns = s[n-m:]
					}
					has = strings.EqualFold(ns, suffix)
				}
			}
		}
		if has {
			break
		}
	}
	return has
}

// Contains reports whether s contains any of the substrings in ss.
// An empty substring always matches; an empty ss never does.
func Contains(s string, ss []string, caseSensitive bool) bool {
	if len(ss) == 0 {
		return false
	}

	if !caseSensitive {
		s = strings.ToLower(s)
	}
	for _, substr := range ss {
		if substr == "" {
			return true
		} else {
			if !caseSensitive {
				substr = strings.ToLower(substr)
			}
			if strings.Contains(s, substr) {
				return true
			}
		}
	}
	return false
}

// QuoteMeta backslash-escapes a fixed set of regex metacharacters in s.
// NOTE(review): this escapes fewer characters than regexp.QuoteMeta
// ({, }, | are not escaped here) — confirm that is intentional.
func QuoteMeta(s string) string {
	if s == "" {
		return s
	}

	var buf bytes.Buffer
	buf.Grow(len(s))
	for _, char := range s {
		switch char {
		case '.', '+', '\\', '(', ')', '[', ']', '$', '^', '*', '?':
			buf.WriteRune('\\')
		}
		buf.WriteRune(char)
	}
	return buf.String()
}

// HexToByte converts a hex string to []byte, two hex digits per byte.
// Like pack("H*", string) in PHP. A trailing odd digit is ignored.
// NOTE(review): ParseInt errors are silently discarded (invalid pairs
// become 0x00), and bitSize 10 is unusual though any two hex digits fit;
// the local `bytes` also shadows the bytes package — worth cleaning up.
func HexToByte(hex string) []byte {
	length := len(hex) / 2
	bytes := make([]byte, length)
	rs := []rune(hex)

	for i := 0; i < length; i++ {
		s := string(rs[i*2 : i*2+2])
		value, _ := strconv.ParseInt(s, 16, 10)
		bytes[i] = byte(value & 0xFF)
	}
	return bytes
}

// SequentialWordFields splits s (by the given separators) into all phrases
// of 1..n consecutive words, deduplicated case-insensitively. Non-letter,
// non-digit characters are trimmed from the ends of each word (help? ->
// help) but kept inside words (a?b stays a?b).
//
// stringx.SequentialWordFields("this is a string, are you sure?", 1, ",") => ["this", "is", "a", "string", "are", "you", "sure"]
// stringx.SequentialWordFields("this is a string, are you sure?", 2, ",") => ["this", "is", "a", "string", "are", "you", "sure", "this is", "is a", "a string", "are you", "you sure"]
func SequentialWordFields(s string, n int, separators ...string) []string {
	s = strings.TrimSpace(s)
	if s == "" {
		return nil
	}

	// fields maps lowercase phrase -> first-seen original casing.
	fields := make(map[string]string, 0)
	sections := Split(s, separators...)
+ fnCleanWord := func(word string) string { + if word == "" { + return "" + } + return strings.TrimFunc(word, func(r rune) bool { + return !unicode.IsLetter(r) && !unicode.IsNumber(r) + }) + } + sb := strings.Builder{} + for _, section := range sections { + words := strings.Fields(section) + maxN := len(words) + validN := n + if n > maxN { + validN = maxN + } + for i, word := range words { + word = fnCleanWord(word) + if word == "" { + continue + } + fieldKey := strings.ToLower(word) + if _, ok := fields[fieldKey]; !ok { + fields[fieldKey] = word + } + if n > 1 { + validN = i + n + if validN > maxN { + validN = maxN + } + for jj := validN; jj > i; jj-- { + sb.Reset() + for ii := i; ii < jj; ii++ { + w := fnCleanWord(words[ii]) + if w != "" { + sb.WriteString(w) + if ii < jj-1 { + sb.WriteRune(' ') + } + } + } + if sb.Len() > 0 { + fieldKey = strings.ToLower(sb.String()) + if _, ok := fields[fieldKey]; !ok { + fields[fieldKey] = sb.String() + } + } + } + } + } + } + if len(fields) == 0 { + return nil + } + items := make([]string, len(fields)) + i := 0 + for _, v := range fields { + items[i] = v + i++ + } + return items +} + +// Len 获取字符串长度(一个中文算 1) +func Len(s string) int { + return len([]rune(s)) +} + +// UpperFirst 首字母大写 +func UpperFirst(s string) string { + r := []rune(s) + if len(s) > 0 && unicode.IsLetter(r[0]) && unicode.IsLower(r[0]) { + r[0] -= 32 + return string(r) + } + return s +} + +// LowerFirst 首字母小写 +func LowerFirst(s string) string { + r := []rune(s) + if len(s) > 0 && unicode.IsLetter(r[0]) && unicode.IsUpper(r[0]) { + r[0] += 32 + return string(r) + } + return s +} diff --git a/stringx/string_test.go b/stringx/string_test.go new file mode 100644 index 0000000..9e8a707 --- /dev/null +++ b/stringx/string_test.go @@ -0,0 +1,547 @@ +package stringx + +import ( + "strings" + "testing" + + "git.cloudyne.io/go/hiscaler-gox/slicex" + "github.com/stretchr/testify/assert" +) + +func TestIsEmpty(t *testing.T) { + testCases := []struct { + String string + 
IsEmpty bool + }{ + {"A", false}, + {"", true}, + {" ", false}, + {" ", false}, + {"   ", false}, + {` + + +`, false}, + {` + +a + +`, false}, + } + for i, testCase := range testCases { + b := IsEmpty(testCase.String) + if b != testCase.IsEmpty { + t.Errorf("%d: %s except %v, actual %v", i, testCase.String, testCase.IsEmpty, b) + } + } +} + +func TestIsBlank(t *testing.T) { + testCases := []struct { + String string + IsEmpty bool + }{ + {"A", false}, + {"", true}, + {" ", true}, + {" ", true}, + {"   ", true}, + {` + + +`, true}, + {` + +a + +`, false}, + } + for i, testCase := range testCases { + b := IsBlank(testCase.String) + if b != testCase.IsEmpty { + t.Errorf("%d: %s except %v, actual %v", i, testCase.String, testCase.IsEmpty, b) + } + } +} + +func TestContainsChinese(t *testing.T) { + type testCast struct { + String string + Has bool + } + testCasts := []testCast{ + {"", false}, + {"a", false}, + {"A_B", false}, + {"A_中B", true}, + } + for _, tc := range testCasts { + has := ContainsChinese(tc.String) + if has != tc.Has { + t.Errorf("%s except %v, actual:%v", tc.String, tc.Has, has) + } + } +} + +func TestToNarrow(t *testing.T) { + testCasts := []struct { + tag string + string string + expected string + }{ + {"t-letter", "abc", "abc"}, + {"t-number", "0123456789", "0123456789"}, + {"t-letter-number", "a0", "a0"}, + {"t-other", "~!@#$%^&*()-+?", "~!@#$%^&*()-+?"}, + } + for _, tc := range testCasts { + value := ToNarrow(tc.string) + assert.Equal(t, tc.expected, value, tc.tag) + } +} + +func TestToWiden(t *testing.T) { + testCasts := []struct { + tag string + string string + expected string + }{ + {"t-letter", "abc", "abc"}, + {"t-number", "0123456789", "0123456789"}, + {"t-letter-number", "a0", "a0"}, + {"t-other", "~!@#$%^&*()-+?", "~!@#$%^&*()-+?"}, + } + for _, tc := range testCasts { + value := ToWiden(tc.string) + assert.Equal(t, tc.expected, value, tc.tag) + } +} + +func TestSplit(t *testing.T) { + type testCast struct { + Number int + String string + 
Seps []string + Values []string + } + testCasts := []testCast{ + {1, "abc", []string{}, []string{"abc"}}, + {2, "a b c", []string{}, []string{"a b c"}}, + {3, "a b c,d", []string{}, []string{"a b c,d"}}, + {4, "a b c,d", []string{",", " "}, []string{"a", "b", "c", "d"}}, + {5, "a,b,c,d", []string{",", " "}, []string{"a", "b", "c", "d"}}, + {6, "a,b,c,d e", []string{",", " "}, []string{"a", "b", "c", "d", "e"}}, + {7, "a.,b,c,d e", []string{",", " "}, []string{"a.", "b", "c", "d", "e"}}, + {8, "a.,b,c,d e", []string{",", ".", " "}, []string{"a", "b", "c", "d", "e"}}, + {9, "a.,b,c,d e", []string{",", " "}, []string{"a.", "b", "c", "d", "e"}}, + {10, "a.,b,c,d e", []string{",", "", " "}, []string{"a", ".", "b", "c", "d", "e"}}, + {11, "hello, world!!!", []string{",", " ", "!"}, []string{"hello", "world"}}, + {12, "WaterWipes Original Baby Wipes, 99.9% Water, Unscented & Hypoallergenic for Sensitive Newborn Skin, 3 Packs (180 Count)", []string{",", " ", "!"}, []string{"WaterWipes", "Original", "Baby", "Wipes", "99.9%", "Water", "Unscented", "&", "Hypoallergenic", "for", "Sensitive", "Newborn", "Skin", "3", "Packs", "(180", "Count)"}}, + } + for _, tc := range testCasts { + values := Split(tc.String, tc.Seps...) 
+ if !slicex.StringSliceEqual(values, tc.Values, false, false, true) { + t.Errorf("%d except %#v, actual:%#v", tc.Number, tc.Values, values) + } + } +} + +func TestString(t *testing.T) { + testCases := []struct { + Number int + Value interface{} + Except string + }{ + {1, false, "false"}, + {2, true, "true"}, + {3, 1, "1"}, + {4, 1.1, "1.1"}, + {5, "abc", "abc"}, + {6, [2]int{1, 2}, "[1,2]"}, + {7, []int{1, 2}, "[1,2]"}, + {8, []string{"a", "b"}, `["a","b"]`}, + {9, struct { + ID int + Name string + }{ID: 1, Name: "John"}, `{"ID":1,"Name":"John"}`}, + } + for _, testCase := range testCases { + s := String(testCase.Value) + if !strings.EqualFold(s, testCase.Except) { + t.Errorf("%d except: %s, actual: %s", testCase.Number, testCase.Except, s) + } + } +} + +func TestRemoveEmoji(t *testing.T) { + testCases := []struct { + Number int + BeforeString string + AfterString string + }{ + {1, "👶hi", "hi"}, + {2, "1👰", "1"}, + {3, "1👉2🤟👉👰3🤟👉👶你好🤟", "123你好"}, + {4, "1👉2🤟👉👰3🤟👉👶你   好🤟", "123你   好"}, + } + for _, testCase := range testCases { + s := RemoveEmoji(testCase.BeforeString, true) + if !strings.EqualFold(s, testCase.AfterString) { + t.Errorf("%d except: %s, actual: %s", testCase.Number, testCase.AfterString, s) + } + } +} + +func TestTrimAny(t *testing.T) { + var testCases = []struct { + string string + replacePairs []string + expected string + }{ + {" a", []string{}, " a"}, + {" 10GGGGgggggg", []string{"", "G"}, "10"}, + {" A", []string{}, " A"}, + {" Abc", []string{""}, "Abc"}, + {" Abc", []string{"", "", " "}, "Abc"}, + {" Abcd Efg ", []string{"", "ab", "FG"}, "cd E"}, + {" Abcd中文 Efg ", []string{"", "abcd", "中", "FG"}, "文 E"}, + {" Abcd中文 Efg ", []string{"", "中", "abcd", "FG"}, "文 E"}, + {" a", []string{"b", "c"}, " a"}, + {" 10kg", []string{"g", "kg", ""}, "10"}, + {" 10kgg", []string{"g", "kg", ""}, "10"}, + {" 10kg g", []string{"g", "kg", ""}, "10"}, + {" 10kg agbg", []string{"g", "ag", "bg", "kg", ""}, "10"}, + {" 10kg abgcdg", []string{"bg", "abg", "cdg", "kg", 
""}, "10"}, + {" 10kg abgcdg", []string{"a", "b", "c", "d", "g", ""}, "10k"}, + {" 10kg ggkgg", []string{"kg", "g", ""}, "10"}, + {" a", []string{"a", "c"}, " "}, + {" a ", []string{}, " a "}, + {` + + a + + `, []string{}, ` + + a + + `}, + {" ab", []string{"b"}, " a"}, + {" a b ", []string{"b"}, " a b "}, + {" a b b", []string{"b"}, " a b "}, + {" a b a", []string{"b"}, " a b a"}, + {"5.0 out of 5 stars", []string{"5.0 out of", "stars"}, " 5 "}, + {"5.0 out of 5 stars", []string{"5.0 out of", "stars", ""}, "5"}, + {"5.0 out of 5 stars", []string{"5.0 out of", "5", "stars", " "}, ""}, + {"a b a b c d e f g g f e d", []string{"a", "b", "c", "d", "f g", " "}, "e f g g f e"}, + } + for _, testCase := range testCases { + actual := TrimAny(testCase.string, testCase.replacePairs...) + if actual != testCase.expected { + t.Errorf("TrimAny(`%s`, %#v) = `%s`; expected `%s`", testCase.string, testCase.replacePairs, actual, testCase.expected) + } + } +} + +func BenchmarkTrimAny(b *testing.B) { + for i := 0; i < b.N; i++ { + TrimAny("a b a b c d e f g g f e d", "a", "b", "c", "d", "f g", " ") + } +} + +func TestRemoveExtraSpace(t *testing.T) { + var testCases = []struct { + number int + string string + expected string + }{ + {1, " a", "a"}, + {2, " a", "a"}, + {3, " a", "a"}, + {4, " a ", "a"}, + {5, ` + +a + +`, "a"}, + {6, " ab", "ab"}, + {7, " a b ", "a b"}, + {8, " a b b", "a b b"}, + {9, "   hello, world!", "hello, world!"}, + {10, ` +   hello, + + + + + world! +`, "hello, world!"}, + {11, ` + +
+ + + +hello world + + + + +
+`, `
hello world
`}, + } + for _, testCase := range testCases { + actual := RemoveExtraSpace(testCase.string) + if actual != testCase.expected { + t.Errorf("%d RemoveExtraSpace(%s) = '%s'; expected %s", testCase.number, testCase.string, actual, testCase.expected) + } + } +} + +func TestToBytes(t *testing.T) { + tests := []struct { + tag string + bytesValue []byte + string string + }{ + {"t1", []byte{'a'}, "a"}, + {"t2", []byte("abc"), "abc"}, + {"t3", []byte("a b c "), "a b c "}, + } + for _, test := range tests { + b := ToBytes(test.string) + assert.Equal(t, test.bytesValue, b, test.tag) + } +} + +func TestWordMatched(t *testing.T) { + tests := []struct { + tag string + string string + words []string + caseSensitive bool + except bool + }{ + {"t1", "Scott Blue Shop Towels in a Box - 200 Sheets", []string{"Towels", "B"}, true, true}, + {"t2", "Scott Blue Shop Towels in a Box - 200 Sheets", []string{"towels", "B"}, false, true}, + {"t3.1", "Scott Blue Shop Towels in a Box - 200 Sheets", []string{"tow", "A", "B"}, false, true}, + {"t3.2", "Scott Blue Shop Towels in a Box - 200 Sheets", []string{"tow", "A", "B"}, true, false}, + {"t4", "Scott Blue Shop Towels in a Box - 200 Sheets", []string{"tow"}, false, false}, + {"t5.1", "Scott Blue Shop Towels in a Box - 200 Sheets", []string{"20"}, false, false}, + {"t5.2", "Scott Blue Shop Towels in a Box - 200 Sheets", []string{"200"}, false, true}, + {"t6", "Scott Blue Shop Towels in a Box - 200 Sheets", []string{"Blue Shop"}, true, true}, + {"t7.1", "Scott Blue Shop Towels in a Box - 200 Sheets", []string{"blue shop"}, false, true}, + {"t7.2", "Scott Blue Shop Towels in a Box - 200 Sheets", []string{"blue shop"}, true, false}, + {"t8", "Scott Blue Shop Towels in a Box - 200 Sheets", []string{"Scott "}, false, true}, + {"t9", "Scott Blue Shop Towels in a Box - 200 Sheets", []string{"Sheets "}, false, true}, + {"t10.1", "Scott Blue Shop Towels in a Box - 200 Sheets", []string{`.`}, false, false}, + {"t10.2", "Scott Blue Shop Towels in a Box - 
200 Sheets", []string{`...................`}, false, false}, + {"t11", "Scott Blue Shop Towels in a Box - 200 Sheets", []string{"*"}, false, false}, + {"t12", "Scott Blue Shop Towels in a Box - 200 Sheets", []string{"***"}, false, false}, + {"t13.1", "Scott Blue Shop Towels in a Box - 200 Sheets", []string{`.*`}, false, false}, + {"t13.2", "Scott Blue Shop Towels in a Box - 200 Sheets", []string{`B.*x`}, false, false}, + {"t14", "Scott Blue Shop Towels in a Box - 200 Sheets.", []string{"."}, false, false}, + {"t14.1", "Scott Blue Shop Towels in a Box - 200 Sheets.", []string{".*"}, false, false}, + {"t14.2", "Scott Blue Shop Towels in a Box - 200 Sheets.", []string{"Sheets."}, false, true}, + {"t15", "Scott Blue Shop Towels in a Box - 200 Sheets?", []string{"?"}, false, false}, + {"t16", "Scott Blue Shop Towels in a Box - 200 Sheets", []string{"[Sheets]"}, false, false}, + {"t17", "Scott Blue Shop Towels in a Box a-a 200 Sheets", []string{"a-a"}, false, true}, + {"t18", "Scott Blue Shop Towels in a Box--200 Sheets", []string{"-"}, false, false}, + {"t19", "Scott Blue Shop Towels in a Box--200 Sheets", []string{"--"}, false, false}, + {"t20", "Scott Blue Shop Towels in a Box--200 Sheets", []string{"Box--200"}, false, true}, + {"t20.1", "Scott Blue Shop Towels in a Box~200 Sheets", []string{"Box"}, false, false}, + {"t21", "Scott Blue Shop Towels in a Box--200 Sheets 中文", []string{"中文"}, false, true}, + {"t22", "中文汉字", []string{"汉字"}, false, true}, + {"t23", "中a文b汉c字", []string{"汉c字"}, false, true}, + {"t24", "中a文b汉c字", []string{"汉C字"}, false, true}, + {"t25", "中a文b汉c字", []string{"汉C字"}, true, false}, + } + for _, test := range tests { + b := WordMatched(test.string, test.words, test.caseSensitive) + assert.Equal(t, test.except, b, test.tag) + } +} + +func BenchmarkWordMatched(b *testing.B) { + for i := 0; i < b.N; i++ { + WordMatched("Scott Blue Shop Towels in a Box--200 Sheets", []string{"Throw Pillow Covers", "Throw Pillows", "Patio Furniture Pillows", "Pillow 
Covers", "Pillowcases", "Pillow Case", "Pillow Cover", "scot", "scottt", "blu", "Shop Towels"}, true) + } +} + +func TestStartsWith(t *testing.T) { + tests := []struct { + tag string + string string + words []string + caseSensitive bool + except bool + }{ + {"t1", "Hello world!", []string{"he", "He"}, false, true}, + {"t2", "Hello world!", []string{"he", "He"}, true, true}, + {"t3", "Hello world!", []string{"he"}, true, false}, + {"t4", "", []string{""}, true, true}, + {"t5", "", nil, true, true}, + {"t6", "", []string{}, true, true}, + {"t7", "Hello world!", []string{""}, true, true}, + {"t8", "Hello!", []string{"Hello world"}, true, false}, + } + for _, test := range tests { + b := StartsWith(test.string, test.words, test.caseSensitive) + assert.Equal(t, test.except, b, test.tag) + } +} + +func BenchmarkStartsWith(b *testing.B) { + for i := 0; i < b.N; i++ { + StartsWith("Hello world!", []string{"a", "b", "c", "d", "e", "f", "g", "h"}, false) + } +} + +func TestEndsWith(t *testing.T) { + tests := []struct { + tag string + string string + words []string + caseSensitive bool + except bool + }{ + {"t1", "Hello world!", []string{"he", "He"}, false, false}, + {"t2", "Hello world!", []string{"he", "He"}, true, false}, + {"t3", "Hello world!", []string{"d!", "!"}, true, true}, + {"t4", "Hello world!", []string{"WORLD!"}, false, true}, + {"t5", "", []string{""}, true, true}, + {"t6", "", nil, true, true}, + {"t7", "", []string{}, true, true}, + {"t8", "Hello world!", []string{""}, true, true}, + {"t9", "world!", []string{"hello world!", "world!", "!"}, true, true}, + } + for _, test := range tests { + b := EndsWith(test.string, test.words, test.caseSensitive) + assert.Equal(t, test.except, b, test.tag) + } +} + +func TestContains(t *testing.T) { + tests := []struct { + tag string + string string + words []string + caseSensitive bool + except bool + }{ + {"t1", "Hello world!", []string{"ol", "LL"}, false, true}, + {"t2", "Hello world!", []string{"ol", "LL"}, true, false}, 
+ {"t3", "Hello world!", []string{"notfound", "world"}, false, true}, + {"t4", "Hello world!", []string{"notfound", "world"}, true, true}, + {"t5", "", []string{""}, true, true}, + {"t6", "Hello world!", []string{""}, true, true}, + } + for _, test := range tests { + b := Contains(test.string, test.words, test.caseSensitive) + assert.Equal(t, test.except, b, test.tag) + } +} + +func BenchmarkContains(b *testing.B) { + for i := 0; i < b.N; i++ { + Contains("Customer satisfaction is important to us. We are confident with our fuzzy blanket, but if you are not satisfied with our blanket feel free to contact us. we will provide you with the most satisfactory solution", []string{"free"}, false) + } +} + +func TestQuoteMeta(t *testing.T) { + tests := []struct { + tag string + string string + expected string + }{ + {"t1", `.+\()[]$^*?`, `\.\+\\\(\)\[\]\$\^\*\?`}, + {"t1", `.+\()[]$^*?{}`, `\.\+\\\(\)\[\]\$\^\*\?{}`}, + } + for _, test := range tests { + b := QuoteMeta(test.string) + assert.Equal(t, test.expected, b, test.tag) + } +} + +func TestSequentialWordFields(t *testing.T) { + tests := []struct { + tag string + string string + n int + separators []string + expected []string + }{ + {"t1", "hello world", 1, []string{}, []string{"hello", "world"}}, + {"t2", "hello world", 2, []string{}, []string{"hello", "world", "hello world"}}, + {"t3", "hello world", 2, []string{}, []string{"hello", "world", "hello world"}}, + {"t4", "this is a string", 1, []string{}, []string{"this", "is", "a", "string"}}, + {"t5", "this is a string", 2, []string{}, []string{"this", "is", "a", "string", "this is", "is a", "a string"}}, + {"t6", "this is a string", 3, []string{}, []string{"this", "is", "a", "string", "this is", "this is a", "is a", "is a string", "a string"}}, + {"t7", "What's you name? 
My name is XiaoMing.", 3, []string{"?"}, []string{"What's", "you", "name", "My", "is", "XiaoMing", "What's you", "What's you name", "you name", "My name", "My name is", "name is", "name is XiaoMing", "is XiaoMing"}}, + {"t8", "a1, a2? b1 2b?", 3, []string{","}, []string{"a1", "a2", "b1", "2b", "a2 b1", "a2 b1 2b", "b1 2b"}}, + {"t9", "a1, a?2? b1 2b?~", 3, []string{","}, []string{"a1", "a?2", "b1", "2b", "a?2 b1", "a?2 b1 2b", "b1 2b"}}, + } + for _, test := range tests { + v := SequentialWordFields(test.string, test.n, test.separators...) + assert.ElementsMatch(t, test.expected, v, test.tag) + } +} + +func BenchmarkSequentialWordFields(b *testing.B) { + for i := 0; i < b.N; i++ { + SequentialWordFields("What's you name? My name is XiaoMing.", 3, []string{"?"}...) + } +} + +func TestLen(t *testing.T) { + testCases := []struct { + tag string + string string + expected int + }{ + {"t1", "hello", 5}, + {"t1", "hello world", 11}, + {"t1", "hello中国", 7}, + {"t1", "hello 中国", 8}, + {"t1", "你好中国", 4}, + } + for _, testCase := range testCases { + n := Len(testCase.string) + assert.Equal(t, testCase.expected, n, testCase.tag) + } +} + +func TestUpperFirst(t *testing.T) { + testCases := []struct { + tag string + string string + expected string + }{ + {"t1", "hello", "Hello"}, + {"t1", "hello world", "Hello world"}, + {"t1", "hello中国", "Hello中国"}, + {"t1", "hello 中国", "Hello 中国"}, + {"t1", "你好中国", "你好中国"}, + } + for _, testCase := range testCases { + s := UpperFirst(testCase.string) + assert.Equal(t, testCase.expected, s, testCase.tag) + } +} + +func TestLowerFirst(t *testing.T) { + testCases := []struct { + tag string + string string + expected string + }{ + {"t1", "Hello", "hello"}, + {"t1", "Hello world", "hello world"}, + {"t1", "Hello中国", "hello中国"}, + {"t1", "Hello 中国", "hello 中国"}, + {"t1", "你好中国", "你好中国"}, + } + for _, testCase := range testCases { + s := LowerFirst(testCase.string) + assert.Equal(t, testCase.expected, s, testCase.tag) + } +} diff --git 
a/timex/time.go b/timex/time.go
new file mode 100644
index 0000000..a023b50
--- /dev/null
+++ b/timex/time.go
@@ -0,0 +1,123 @@
+package timex
+
+import (
+	"math"
+	"time"
+)
+
+// IsAmericaSummerTime reports whether t's date falls inside US daylight
+// saving time: from the second Sunday of March to the day before the
+// first Sunday of November. Only the date is considered, not the hour.
+func IsAmericaSummerTime(t time.Time) (yes bool) {
+	if t.IsZero() {
+		return
+	}
+
+	month := t.Month()
+	switch month {
+	case 4, 5, 6, 7, 8, 9, 10:
+		// April through October lie entirely inside DST.
+		yes = true
+	case 3, 11:
+		day := t.Day()
+		firstDay := t.AddDate(0, 0, -day+1)
+		// Days from day 1 to the first Sunday of this month
+		// (0 when the month starts on a Sunday).
+		offset := (7 - int(firstDay.Weekday())) % 7
+		// DST starts on the second Sunday of March (day 8+offset) and
+		// ends on the first Sunday of November (day 1+offset). The
+		// previous formula mislocated both boundaries when the month
+		// began on a Sunday (e.g. 2020-03-08 was reported as standard time).
+		if (month == time.March && day >= 8+offset) ||
+			(month == time.November && day < 1+offset) {
+			yes = true
+		}
+	}
+	return
+}
+
+// ChineseTimeLocation returns the China (UTC+8) time location, falling
+// back to a fixed CST zone when tzdata is unavailable.
+func ChineseTimeLocation() *time.Location {
+	loc, err := time.LoadLocation("Asia/Shanghai")
+	if err != nil {
+		loc = time.FixedZone("CST", 8*3600)
+	}
+	return loc
+}
+
+// Between reports whether t lies within [begin, end], inclusive.
+func Between(t, begin, end time.Time) bool {
+	return (t.After(begin) && t.Before(end)) || t.Equal(begin) || t.Equal(end)
+}
+
+// DayStart returns 00:00:00 of t's date.
+// NOTE(review): the result is built in time.Local, discarding t's own
+// location — confirm callers expect local-zone semantics.
+func DayStart(t time.Time) time.Time {
+	return time.Date(t.Year(), t.Month(), t.Day(), 0, 0, 0, 0, time.Local)
+}
+
+// DayEnd returns the last nanosecond (23:59:59.999999999) of t's date.
+func DayEnd(t time.Time) time.Time {
+	return time.Date(t.Year(), t.Month(), t.Day(), 23, 59, 59, 999999999, time.Local)
+}
+
+// MonthStart returns 00:00:00 on the first day of t's month.
+func MonthStart(t time.Time) time.Time {
+	return time.Date(t.Year(), t.Month(), 1, 0, 0, 0, 0, time.Local)
+}
+
+// MonthEnd returns the last instant of t's month.
+func MonthEnd(t time.Time) time.Time {
+	return DayEnd(MonthStart(t).AddDate(0, 1, -1))
+}
+
+// IsAM reports whether t is before noon.
+func IsAM(t time.Time) bool {
+	return t.Hour() <= 11
+}
+
+// IsPM reports whether t is at or after noon.
+func IsPM(t time.Time) bool {
+	return t.Hour() >= 12
+}
+
+// WeekStart returns the Monday 00:00:00 (UTC) of the ISO week encoded as
+// year*100+week, e.g. 202201 -> 2022-01-03.
+func WeekStart(yearWeek int) time.Time {
+	year := yearWeek / 100
+	// yearWeek is year*100+week, so the week number is the last two
+	// digits. The previous `yearWeek % year` only coincidentally gave the
+	// same result (because 100*year % year == 0) and breaks the encoding's
+	// intent.
+	week := yearWeek % 100
+	// Start from the middle of the year:
+	t := time.Date(year, 7, 1, 0, 0, 0, 0, time.UTC)
+
+	// Roll back to Monday:
+	if wd := t.Weekday(); wd == time.Sunday {
+		t = t.AddDate(0, 0, -6)
+	} else {
+		t = 
t.AddDate(0, 0, -int(wd)+1) + } + + // Difference in weeks: + _, w := t.ISOWeek() + t = t.AddDate(0, 0, (week-w)*7) + + return t +} + +func WeekEnd(yearWeek int) time.Time { + t := WeekStart(yearWeek).AddDate(0, 0, 6) + return time.Date(t.Year(), t.Month(), t.Day(), 23, 59, 59, 0, time.UTC) +} + +func YearWeeksByWeek(startYearWeek, endYearWeek int) []int { + weeks := make([]int, 0) + weekStart := WeekStart(startYearWeek) + weekEnd := WeekStart(endYearWeek) + for { + if weekStart.After(weekEnd) { + break + } + y, w := weekStart.ISOWeek() + weeks = append(weeks, y*100+w) + weekStart = weekStart.AddDate(0, 0, 7) + } + return weeks +} + +func YearWeeksByTime(startDate, endDate time.Time) []int { + y1, w1 := startDate.ISOWeek() + y2, w2 := endDate.ISOWeek() + return YearWeeksByWeek(y1*100+w1, y2*100+w2) +} + +// XISOWeek 非 ISO 周,从周日开始算起作为一周的第一天 +func XISOWeek(t time.Time) (year, week int) { + t1 := time.Date(t.Year(), t.Month(), t.Day()+4-int(t.Weekday()), 0, 0, 0, 0, time.UTC) + startTime := time.Date(t1.Year(), 1, 1, 0, 0, 0, 0, time.UTC) + week = int(math.Ceil((float64((t1.Unix()-startTime.Unix())/86400) + 1) / 7)) + return startTime.Year(), week +} diff --git a/timex/time_test.go b/timex/time_test.go new file mode 100644 index 0000000..5fa7061 --- /dev/null +++ b/timex/time_test.go @@ -0,0 +1,217 @@ +package timex + +import ( + "fmt" + "github.com/stretchr/testify/assert" + "testing" + "time" +) + +func TestIsAmericaSummerTime(t *testing.T) { + testCases := []struct { + Date string + SummerTime bool + }{ + {"0001-01-01", false}, + {"2021-11-10", false}, + {"2021-12-10", false}, + {"2021-03-10", false}, + {"2021-03-14", true}, + {"2021-11-01", true}, + {"2021-10-10", true}, + {"2021-10-11", true}, + {"2021-10-12", true}, + {"2021-12-12", false}, + {"2022-03-15", true}, + } + for _, testCase := range testCases { + d, _ := time.Parse("2006-01-02", testCase.Date) + v := IsAmericaSummerTime(d) + if v != testCase.SummerTime { + t.Errorf("%s except %v, actual %v", 
testCase.Date, testCase.SummerTime, v) + } + } +} + +func TestBetween(t *testing.T) { + testCases := []struct { + tag string + t string + begin string + end string + expected bool + }{ + {"t1", "2022-01-01", "2022-01-01", "2022-01-01", true}, + {"t2", "2022-01-02", "2022-01-01", "2022-01-01", false}, + {"t3", "2022-01-02", "2022-01-01", "2022-01-02", true}, + } + layout := "2006-01-02" + for _, testCase := range testCases { + tv, _ := time.Parse(layout, testCase.t) + begin, _ := time.Parse(layout, testCase.begin) + end, _ := time.Parse(layout, testCase.end) + v := Between(tv, begin, end) + assert.Equal(t, testCase.expected, v, testCase.tag) + } +} + +func TestDayStart(t *testing.T) { + testCases := []struct { + tag string + time string + layout string + expected string + }{ + {"t1", "2022-01-01T12:12:00.924Z", "2006-01-02T15:04:05Z", "2022-01-01 00:00:00"}, + {"t2", "2022-01-01 00:00:00", "2006-01-02 15:04:05", "2022-01-01 00:00:00"}, + } + + for _, testCase := range testCases { + tv, err := time.Parse(testCase.layout, testCase.time) + if err != nil { + t.Errorf(err.Error()) + } + v := DayStart(tv).Format("2006-01-02 15:04:05") + assert.Equal(t, testCase.expected, v, testCase.tag) + } +} + +func TestDayEnd(t *testing.T) { + testCases := []struct { + tag string + time string + layout string + expected string + }{ + {"t1", "2022-01-01 12:12:00", "2006-01-02 15:04:05", "2022-01-01 23:59:59"}, + {"t2", "2022-01-01 00:00:00", "2006-01-02 15:04:05", "2022-01-01 23:59:59"}, + } + for _, testCase := range testCases { + tv, _ := time.Parse(testCase.layout, testCase.time) + v := DayEnd(tv).Format("2006-01-02 15:04:05") + assert.Equal(t, testCase.expected, v, testCase.tag) + } +} + +func TestMonthStart(t *testing.T) { + testCases := []struct { + tag string + time string + layout string + expected string + }{ + {"t1", "2022-01-12 12:12:00", "2006-01-02 15:04:05", "2022-01-01 00:00:00"}, + {"t2", "2022-01-21 00:00:00", "2006-01-02 15:04:05", "2022-01-01 00:00:00"}, + } + for _, 
testCase := range testCases { + tv, _ := time.Parse(testCase.layout, testCase.time) + v := MonthStart(tv).Format("2006-01-02 15:04:05") + assert.Equal(t, testCase.expected, v, testCase.tag) + } +} + +func TestMonthEnd(t *testing.T) { + testCases := []struct { + tag string + time string + layout string + expected string + }{ + {"t1", "2022-01-12 12:12:00", "2006-01-02 15:04:05", "2022-01-31 23:59:59"}, + {"t2", "2022-02-21 00:00:00", "2006-01-02 15:04:05", "2022-02-28 23:59:59"}, + } + for _, testCase := range testCases { + tv, _ := time.Parse(testCase.layout, testCase.time) + v := MonthEnd(tv).Format("2006-01-02 15:04:05") + assert.Equal(t, testCase.expected, v, testCase.tag) + } +} + +func TestWeekStart(t *testing.T) { + testCases := []struct { + tag string + yearWeek int + expected string + }{ + {"t1", 202201, "2022-01-03 00:00:00"}, + {"t2", 202202, "2022-01-10 00:00:00"}, + } + for _, testCase := range testCases { + v := WeekStart(testCase.yearWeek).Format("2006-01-02 15:04:05") + assert.Equal(t, testCase.expected, v, testCase.tag) + } +} + +func TestWeekEnd(t *testing.T) { + testCases := []struct { + tag string + yearWeek int + expected string + }{ + {"t1", 202201, "2022-01-09 23:59:59"}, + {"t2", 202202, "2022-01-16 23:59:59"}, + } + for _, testCase := range testCases { + v := WeekEnd(testCase.yearWeek).Format("2006-01-02 15:04:05") + assert.Equal(t, testCase.expected, v, testCase.tag) + } +} + +func TestYearWeeksByWeek(t *testing.T) { + testCases := []struct { + tag string + beginYearWeek int + endYearWeek int + expected []int + }{ + {"t1", 202201, 202202, []int{202201, 202202}}, + {"t2", 202201, 202204, []int{202201, 202202, 202203, 202204}}, + } + for _, testCase := range testCases { + v := YearWeeksByWeek(testCase.beginYearWeek, testCase.endYearWeek) + assert.Equal(t, testCase.expected, v, testCase.tag) + } +} + +func TestYearWeeksByTime(t *testing.T) { + testCases := []struct { + tag string + beginDate string + endDate string + expected []int + }{ + 
{"t1", "2022-01-01", "2022-01-02", []int{202152}}, + {"t2", "2022-01-01", "2022-02-02", []int{202152, 202201, 202202, 202203, 202204, 202205}}, + } + for _, testCase := range testCases { + beginDate, _ := time.Parse("2006-01-02", testCase.beginDate) + endDate, _ := time.Parse("2006-01-02", testCase.endDate) + v := YearWeeksByTime(beginDate, endDate) + assert.Equal(t, testCase.expected, v, testCase.tag) + } +} + +// https://savvytime.com/week-number +func TestXISOWeek(t *testing.T) { + testCases := []struct { + tag string + date string + expected string + }{ + {"t1", "2022-01-01", "202152"}, + {"t2", "2022-01-02", "202201"}, + {"t3", "2022-01-09", "202202"}, + {"t4", "2022-01-10", "202202"}, + {"t5", "2022-01-15", "202202"}, + {"t6", "2022-01-16", "202203"}, + {"t7", "2022-01-17", "202203"}, + {"t8", "2022-01-29", "202204"}, + {"t9", "2022-12-25", "202252"}, + {"t10", "2023-01-01", "202301"}, + } + for _, testCase := range testCases { + d, _ := time.Parse("2006-01-02", testCase.date) + year, week := XISOWeek(d) + assert.Equal(t, testCase.expected, fmt.Sprintf("%d%02d", year, week), testCase.tag) + } +} diff --git a/type.go b/type.go new file mode 100644 index 0000000..12b8fe6 --- /dev/null +++ b/type.go @@ -0,0 +1,17 @@ +package gox + +type Int interface { + ~int | ~int8 | ~int16 | ~int32 | ~int64 +} + +type UInt interface { + ~uint | ~uint8 | ~uint16 | ~uint32 | ~uint64 +} + +type Float interface { + ~float32 | ~float64 +} + +type Number interface { + Int | UInt | Float +}