diff --git a/cmd/server/app/service/machine/machinemanager.go b/cmd/server/app/service/machine/machinemanager.go index b7fcba25a8629a0f6a48debf0641126d93ca48c0..ffd5b0de396e542ef91e7f25bc5319e64912302b 100644 --- a/cmd/server/app/service/machine/machinemanager.go +++ b/cmd/server/app/service/machine/machinemanager.go @@ -9,10 +9,14 @@ package machine import ( "errors" + "time" + eventSDK "gitee.com/openeuler/PilotGo-plugins/event/sdk" departservice "gitee.com/openeuler/PilotGo/cmd/server/app/service/depart" + "gitee.com/openeuler/PilotGo/cmd/server/app/service/plugin" "gitee.com/openeuler/PilotGo/pkg/global" "gitee.com/openeuler/PilotGo/pkg/utils" + commonSDK "gitee.com/openeuler/PilotGo/sdk/common" "gitee.com/openeuler/PilotGo/cmd/server/app/service/internal/dao" ) @@ -91,6 +95,24 @@ func DeleteMachine(Deluuid []string) map[string]string { if err := dao.DeleteMachine(machinedeluuid); err != nil { machinelist[machinedeluuid] = err.Error() } + // Publish a "host removed from platform" event + msgData := commonSDK.MessageData{ + MsgType: eventSDK.MsgHostRemove, + MessageType: eventSDK.GetMessageTypeString(eventSDK.MsgHostRemove), + TimeStamp: time.Now(), + Data: eventSDK.MDHostChange{ + IP: node.IP, + OS: node.Systeminfo, + CPU: node.CPU, + Status: OfflineStatus, + }, + } + msgDataString, _ := msgData.ToMessageDataString() + ms := commonSDK.EventMessage{ + MessageType: eventSDK.MsgHostRemove, + MessageData: msgDataString, + } + plugin.PublishEvent(ms) } else { machinelist[machinedeluuid] = errors.New("该机器不存在").Error() } diff --git a/go.mod b/go.mod index 6abb49662024ee4aad5a7162a03707d4d98b219f..b9acdc3daba21e5323a97e83d54f53378e2048bd 100644 --- a/go.mod +++ b/go.mod @@ -23,7 +23,7 @@ require ( github.com/swaggo/gin-swagger v1.6.0 github.com/swaggo/swag v1.16.3 github.com/tealeg/xlsx v1.0.5 - golang.org/x/crypto v0.29.0 + golang.org/x/crypto v0.30.0 gorm.io/driver/mysql v1.5.1 gorm.io/gorm v1.25.1 k8s.io/klog/v2 v2.9.0 @@ -53,10 +53,10 @@ require ( ) require ( - gitee.com/openeuler/PilotGo-plugins/event/sdk v0.0.0-20241122014720-df6a218375a4 + gitee.com/openeuler/PilotGo-plugins/event/sdk v0.0.0-20241204130048-1bc7cc64986f gitee.com/openeuler/PilotGo/sdk v0.0.0 github.com/Knetic/govaluate v3.0.1-0.20171022003610-9aa49832a739+incompatible // indirect - github.com/bytedance/sonic v1.12.4 // indirect + github.com/bytedance/sonic v1.12.5 // indirect github.com/cespare/xxhash/v2 v2.2.0 // indirect github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect @@ -111,9 +111,9 @@ require ( go.uber.org/multierr v1.11.0 // indirect golang.org/x/arch v0.12.0 // indirect golang.org/x/exp v0.0.0-20230905200255-921286631fa9 // indirect - golang.org/x/net v0.31.0 // indirect - golang.org/x/sys v0.27.0 // indirect - golang.org/x/text v0.20.0 // indirect + golang.org/x/net v0.32.0 // indirect + golang.org/x/sys v0.28.0 // indirect + golang.org/x/text v0.21.0 // indirect google.golang.org/protobuf v1.35.2 // indirect gopkg.in/ini.v1 v1.67.0 // indirect gopkg.in/yaml.v3 v3.0.1 diff --git a/go.sum b/go.sum index d0328370e81136bdedcaec2e5ce332986411536f..575114e8e9dd71964026483045f6c063f9e5e46b 100644 --- a/go.sum +++ b/go.sum @@ -39,6 +39,8 @@ cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3f dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= gitee.com/openeuler/PilotGo-plugins/event/sdk v0.0.0-20241122014720-df6a218375a4 
h1:3o8JG4/033Jhi5aVt0L2yY1LIw+z/TX7iUZIoBLNNhM= gitee.com/openeuler/PilotGo-plugins/event/sdk v0.0.0-20241122014720-df6a218375a4/go.mod h1:hM6k7LD5EzCbWN+v8QjWOFM+sMIp0l021pO8/Ds4sqg= +gitee.com/openeuler/PilotGo-plugins/event/sdk v0.0.0-20241204130048-1bc7cc64986f h1:xV+HcMlRTjGSrXIQyxIlvP3yArGBTFDPMaC5xstt5y8= +gitee.com/openeuler/PilotGo-plugins/event/sdk v0.0.0-20241204130048-1bc7cc64986f/go.mod h1:hM6k7LD5EzCbWN+v8QjWOFM+sMIp0l021pO8/Ds4sqg= github.com/Azure/azure-sdk-for-go/sdk/azcore v1.0.0/go.mod h1:uGG2W01BaETf0Ozp+QxxKJdMBNRWPdstHG0Fmdwn1/U= github.com/Azure/azure-sdk-for-go/sdk/azcore v1.1.2/go.mod h1:uGG2W01BaETf0Ozp+QxxKJdMBNRWPdstHG0Fmdwn1/U= github.com/Azure/azure-sdk-for-go/sdk/azcore v1.6.0/go.mod h1:bjGvMhVMb+EEm3VRNQawDMUyMMjo+S5ewNjflkep/0Q= @@ -90,6 +92,8 @@ github.com/bketelsen/crypt v0.0.3-0.20200106085610-5cbc8cc4026c/go.mod h1:MKsuJm github.com/blang/semver v3.5.1+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk= github.com/bytedance/sonic v1.12.4 h1:9Csb3c9ZJhfUWeMtpCDCq6BUoH5ogfDFLUgQ/jG+R0k= github.com/bytedance/sonic v1.12.4/go.mod h1:B8Gt/XvtZ3Fqj+iSKMypzymZxw/FVwgIGKzMzT9r/rk= +github.com/bytedance/sonic v1.12.5 h1:hoZxY8uW+mT+OpkcUWw4k0fDINtOcVavEsGfzwzFU/w= +github.com/bytedance/sonic v1.12.5/go.mod h1:B8Gt/XvtZ3Fqj+iSKMypzymZxw/FVwgIGKzMzT9r/rk= github.com/bytedance/sonic/loader v0.1.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU= github.com/bytedance/sonic/loader v0.2.1 h1:1GgorWTqf12TA8mma4DDSbaQigE2wOgQo7iCjjJv3+E= github.com/bytedance/sonic/loader v0.2.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU= @@ -700,6 +704,8 @@ golang.org/x/crypto v0.7.0/go.mod h1:pYwdfH91IfpZVANVyUOhSIPZaFoJGxTFbZhFTx+dXZU golang.org/x/crypto v0.9.0/go.mod h1:yrmDGqONDYtNj3tH8X9dzUun2m2lzPa9ngI6/RUPGR0= golang.org/x/crypto v0.29.0 h1:L5SG1JTTXupVV3n6sUqMTeWbjAyfPwoda2DLX8J8FrQ= golang.org/x/crypto v0.29.0/go.mod h1:+F4F4N5hv6v38hfeYwTdx20oUvLLc+QfrE9Ax9HtgRg= +golang.org/x/crypto v0.30.0 h1:RwoQn3GkWiMkzlX562cLB7OxWvjH1L8xutO2WoJcRoY= +golang.org/x/crypto v0.30.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -795,6 +801,8 @@ golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc= golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= golang.org/x/net v0.31.0 h1:68CPQngjLL0r2AlUKiSxtQFKvzRVbnzLwMUn5SzcLHo= golang.org/x/net v0.31.0/go.mod h1:P4fl1q7dY2hnZFxEk4pPSkDHF+QqjitcnDjUQyMM+pM= +golang.org/x/net v0.32.0 h1:ZqPmj8Kzc+Y6e0+skZsuACbx+wzMgo5MQsJh9Qd6aYI= +golang.org/x/net v0.32.0/go.mod h1:CwU0IoeOlnQQWJ6ioyFrfRuomB8GKF6KbYXZVyeXNfs= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -819,6 +827,7 @@ golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ= golang.org/x/sync v0.9.0/go.mod 
h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ= golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -895,6 +904,8 @@ golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.28.0 h1:Fksou7UEQUWlKvIdsqzJmUmCX3cZuD2+P3XyyzwMhlA= +golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210220032956-6a3ed077a48d/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= @@ -904,6 +915,7 @@ golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U= golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= golang.org/x/term v0.26.0 h1:WEQa6V3Gja/BhNxg540hBip/kkaYtRg3cxg4oXSw4AU= golang.org/x/term v0.26.0/go.mod h1:Si5m1o57C5nBNQo5z1iq+XDijt21BDBDp2bK0QI8e3E= +golang.org/x/term v0.27.0 h1:WP60Sv1nlK1T6SupCHbXzSaN0b9wUmsPoRS9b61A23Q= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -919,6 +931,8 @@ golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.20.0 h1:gK/Kv2otX8gz+wn7Rmb3vT96ZwuoxnQlY+HlJVj7Qug= golang.org/x/text v0.20.0/go.mod h1:D4IsuqiFMhST5bX19pQ9ikHC2GsaKyk/oF+pn3ducp4= +golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo= +golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= diff --git a/go.work.sum b/go.work.sum index 91d577c544467939b81fc47fb2e29d858f1d879d..d26950d8c9a138c8c3b86ee0df269da3264baa9e 100644 --- a/go.work.sum +++ b/go.work.sum @@ -94,7 +94,6 @@ github.com/fatih/color v1.14.1/go.mod h1:2oHN61fhTpgcxD3TSWCgKDiH1+x4OiDVVGH8Wlg github.com/felixge/httpsnoop v1.0.1 h1:lvB5Jl89CsZtGIWuTcDM1E/vkVs49/Ml7JJe07l8SPQ= github.com/felixge/httpsnoop v1.0.3/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= github.com/form3tech-oss/jwt-go v3.2.3+incompatible h1:7ZaBxOI7TMoYBfyA3cQHErNNyAWIKUMIwqxEtgHOs5c= -github.com/frankban/quicktest v1.14.4/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0= github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw= github.com/fxamacker/cbor/v2 v2.6.0/go.mod 
h1:pxXPTn3joSm21Gbwsv0w9OSA2y1HFR9qXEeXQVeNoDQ= github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk= @@ -119,7 +118,6 @@ github.com/go-openapi/spec v0.19.3/go.mod h1:FpwSN1ksY1eteniUU7X0N/BgJ7a4WvBFVA8 github.com/go-openapi/swag v0.19.2/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk= github.com/go-openapi/swag v0.19.14 h1:gm3vOOXfiuw5i9p5N9xJvfjvuofpyvLA9Wr6QfK5Fng= github.com/go-openapi/swag v0.22.3/go.mod h1:UzaqsxGiab7freDnrUUra0MwWfN/q7tE4j+VcZ0yl14= -github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= github.com/go-playground/validator/v10 v10.20.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM= github.com/go-stack/stack v1.8.0 h1:5SgMzNM5HxrEjV0ww2lTmX6E2Izsfxas4+YHWRs3Lsk= github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0 h1:p104kn46Q8WdvHunIJ9dAyjPVtrBPhSr3KT2yUst43I= @@ -134,14 +132,12 @@ github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6 github.com/google/btree v1.0.0 h1:0udJVsspx3VBr5FwtLhQQtuAsVc79tTq0ocGIPAU6qo= github.com/google/btree v1.0.1 h1:gK4Kx5IaGY9CD5sPJ36FHiBJ6ZXl0kilRiiCj+jdYp4= github.com/google/gnostic-models v0.6.8/go.mod h1:5n7qKqH0f5wFt+aWF8CW6pZLLNOfYuF5OpfBSENuI8U= -github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/gofuzz v1.0.0 h1:A8PeW59pxE9IoFRqBp37U+mSNaQoZ46F1f0f863XSXw= github.com/google/gofuzz v1.1.0 h1:Hsa8mG0dQ46ij8Sl2AYJDUv1oA9/d6Vk+3LG99Oe02g= github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no= github.com/google/martian/v3 v3.1.0 h1:wCKgOCHuUEVfsaQLpPSJb7VdYCdTVZQAuOdYm1yc/60= github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20221118152302-e6195bd50e26/go.mod h1:dDKJzRmX4S37WGHujM7tX//fmj1uioxKzKxz3lo4HJo= github.com/google/renameio v0.1.0 h1:GOZbcHa3HfsPKPlmyPyN2KEohoMXOhdMbHrvbpl2QaA= github.com/google/s2a-go v0.1.7 h1:60BLSyTrOV4/haCDW4zb1guZItoSq8foHCXrAnjBo/o= github.com/google/s2a-go v0.1.7/go.mod h1:50CgR4k1jNlWBu4UfS4AcfhVe1r6pdZPygJ3R8F0Qdw= @@ -224,7 +220,6 @@ github.com/klauspost/cpuid/v2 v2.2.7/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZY github.com/konsorten/go-windows-terminal-sequences v1.0.3 h1:CE8S1cTafDpPvMhIxNJKvHsGVBgn1xWYf1NbHQhywc8= github.com/kr/fs v0.1.0 h1:Jskdu9ieNAYnjxsi0LbQp1ulIKZV1LAFgK1tWhpZgl8= github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515 h1:T+h1c/A9Gawja4Y9mFVWj2vyii2bbUNDw3kt9VxK2EY= -github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= github.com/kr/pty v1.1.1 h1:VkoXIwSboBpnk99O/KFauAEILuNHv5DVFKZMBN/gUgw= github.com/kr/pty v1.1.5/go.mod h1:9r2w37qlBe7rQ6e1fg1S/9xpWHSnaqNdHD3WcMdbPDA= github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= @@ -263,12 +258,10 @@ github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OS github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs= github.com/oklog/ulid v1.3.1 h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4= github.com/onsi/ginkgo v1.11.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= -github.com/onsi/ginkgo v1.16.5/go.mod h1:+E8gABHa3K6zRBolWtd+ROzc/U5bkGt0FwiG042wbpU= github.com/onsi/ginkgo/v2 v2.0.0/go.mod h1:vw5CSIxN1JObi/U8gcbwft7ZxR2dgaR70JSE3/PpL4c= github.com/onsi/ginkgo/v2 v2.15.0/go.mod 
h1:HlxMHtYF57y6Dpf+mc5529KKmSq9h2FpCF+/ZkwUxKM= github.com/onsi/gomega v1.7.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= github.com/onsi/gomega v1.17.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY= -github.com/onsi/gomega v1.18.1/go.mod h1:0q+aL8jAiMXy9hbwj2mr5GziHiwhAIQpFmmtT5hitRs= github.com/onsi/gomega v1.31.0/go.mod h1:DW9aCi7U6Yi40wNVAvT6kzFnEVEI5n3DloYBiKiT6zk= github.com/opentracing/opentracing-go v1.1.0 h1:pWlfV3Bxv7k65HYwkikxat0+s3pV4bsqf19k25Ur8rU= github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c h1:Lgl0gzECD8GnQ5QCWA8o6BtfL6mDH5rQgM4/fX3avOs= @@ -291,7 +284,6 @@ github.com/prometheus/procfs v0.10.1/go.mod h1:nwNm2aOCAYw8uTR/9bWRREkZFxAUcWzPH github.com/prometheus/procfs v0.12.0/go.mod h1:pcuDEFsWDnvcgNzo4EEweacyhjeA9Zk3cnaOZAZEfOo= github.com/prometheus/tsdb v0.7.1 h1:YZcsG11NqnK4czYLrWd9mpEuAJIHVQLwdrleYfszMAA= github.com/rogpeppe/fastuuid v1.2.0 h1:Ppwyp6VYCF1nvBTXL3trRso7mXMlRrw9ooo375wvi2s= -github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f h1:UFr9zpz4xgTnIE5yIMtWAMngCdZ9p/+q6lTbgelo80M= github.com/sagikazarmark/crypt v0.15.0 h1:TQJg76CemcIdJyC9/dmNjU9OUyIFHyvE50Tpq1t1nqY= github.com/sagikazarmark/crypt v0.15.0/go.mod h1:5rwNNax6Mlk9sZ40AcyVtiEw24Z4J04cfSioF2COKmc= @@ -309,7 +301,6 @@ github.com/stoewer/go-strcase v1.2.0 h1:Z2iHWqGXH00XYgqDmNgQbIBxf3wrNq0F3feEy0ai github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= github.com/stretchr/objx v0.5.0 h1:1zr/of2m5FGMsad5YfcqgdqdWrIhu+EBEJRhR1U7z/c= github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= -github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= github.com/subosito/gotenv v1.4.2/go.mod h1:ayKnFf/c6rvx/2iiLrJUk1e6plDbT3edrFNGqEflhK0= github.com/tmc/grpc-websocket-proxy v0.0.0-20201229170055-e5319fda7802 h1:uruHq4dN7GR16kFc5fp3d1RIYzJW5onx8Ybykw2YQFA= github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg= @@ -368,7 +359,6 @@ golang.org/x/lint v0.0.0-20210508222113-6edffad5e616 h1:VLliZ0d+/avPrXXH+OakdXhp golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028 h1:4+4C/Iv2U4fMZBiMCc98MG1In4gJY5YRhtpDNeDeHWs= golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= -golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/net v0.0.0-20210224082022-3d97a244fca7/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= @@ -378,7 +368,7 @@ golang.org/x/oauth2 v0.12.0/go.mod h1:A74bZ3aGXgCY0qaIC9Ahg6Lglin4AMAco8cIv9baba golang.org/x/sync v0.3.0 h1:ftCYgMx6zT/asHUrPw8BLLscYtGznsLAnjq5RH9P66E= golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= -golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20190616124812-15dcb6c0061f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210426230700-d19ff857e887/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys 
v0.0.0-20220310020820-b874c991c1a5/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -387,9 +377,10 @@ golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.22.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/telemetry v0.0.0-20240208230135-b75ee8823808/go.mod h1:KG1lNk5ZFNssSZLrpVb4sMXKMpGwGXOxSG3rnu2gZQQ= +golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE= golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU= golang.org/x/term v0.18.0/go.mod h1:ILwASektA3OnRv7amZ1xhE/KTR+u50pbXfZ03+6Nx58= -golang.org/x/term v0.26.0/go.mod h1:Si5m1o57C5nBNQo5z1iq+XDijt21BDBDp2bK0QI8e3E= +golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM= golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/time v0.3.0 h1:rg5rLMjNzMS1RkNLzCG38eapWhnYLFYXDXj2gOlr8j4= diff --git a/vendor/gitee.com/openeuler/PilotGo-plugins/event/sdk/common.go b/vendor/gitee.com/openeuler/PilotGo-plugins/event/sdk/common.go new file mode 100644 index 0000000000000000000000000000000000000000..d6a16d7b053c12b4b191c78125a599c0833096f0 --- /dev/null +++ b/vendor/gitee.com/openeuler/PilotGo-plugins/event/sdk/common.go @@ -0,0 +1,121 @@ +/* + * Copyright (c) KylinSoft Co., Ltd. 2024.All rights reserved. + * PilotGo-plugins licensed under the Mulan Permissive Software License, Version 2. + * See LICENSE file for more details. + * Author: zhanghan2021 + * Date: Wed Jul 24 10:02:04 2024 +0800 + */ +package sdk + +import "time" + +// event消息类型定义/msgTypeId +const ( + // 用户登录 + MsgUserLogin = 0 + // 用户退出 + MsgUserLogout = 1 + + // 平台新增主机 + MsgHostAdd = 10 + // 平台移除主机 + MsgHostRemove = 11 + // 主机上线 + MsgHostOnline = 12 + // 平台离线 + MsgHostOffline = 13 + + // 主机安装软件包 + MsgPackageInstall = 20 + // 主机升级软件包 + MsgPackageUpdate = 21 + // 主机卸载软件包 + MsgPackageUninstall = 22 + // 主机ip变更 + MsgIPChange = 30 + + // 插件添加 + MsgPluginAdd = 40 + // 插件卸载 + MsgPluginRemove = 41 + // 插件上线 + MsgPluginOnline = 42 + // 插件离线 + MsgPluginOffline = 43 +) + +func GetMessageTypeString(msgType int) string { + switch msgType { + case MsgUserLogin: + return "用户登录" + case MsgUserLogout: + return "用户退出" + case MsgHostAdd: + return "平台新增主机" + case MsgHostRemove: + return "平台移除主机" + case MsgHostOnline: + return "主机上线" + case MsgHostOffline: + return "主机离线" + case MsgPackageInstall: + return "主机安装软件包" + case MsgPackageUpdate: + return "主机升级软件包" + case MsgPackageUninstall: + return "主机卸载软件包" + case MsgIPChange: + return "主机ip变更" + case MsgPluginAdd: + return "插件添加" + case MsgPluginRemove: + return "插件卸载" + case MsgPluginOnline: + return "插件上线" + case MsgPluginOffline: + return "插件离线" + default: + return "未知的消息类型" + } +} + +type MessageData struct { + MsgType int `json:"msg_type_id"` + MessageType string `json:"msg_type"` + TimeStamp time.Time `json:"timestamp"` + Data interface{} `json:"data"` +} + +type MDUserSystemSession struct { //平台登录、退出 + UserName string `json:"user_name"` + Email string `json:"email"` +} + +type MDHostChange struct { // 主机新增、移除、上线、离线 + IP string `json:"ip"` + OS string `json:"os"` + OSVersion string `json:"os_version"` + PrettyName string `json:"pretty_name"` + CPU string `json:"cpu"` + Status string `json:"status"` //在线状态 +} + +type MDHostPackageOpt struct { //软件包安装、升级、卸载 + 
HostIP string `json:"hostIp"` + Name string `json:"name"` + Version string `json:"version"` +} + +type MDHostIPChange struct { //主机ip变更 + HostUUID string `json:"host_uuid"` + NewIP string `json:"new_ip"` + OldIP string `json:"old_ip"` +} + +type MDPluginChange struct { // 插件新增、移除、上线、离线 + PluginName string `json:"plugin_name"` + Version string `json:"version"` + Url string `json:"url"` + Description string `json:"description"` + Status bool `json:"status"` +} diff --git a/vendor/gitee.com/openeuler/PilotGo-plugins/event/sdk/event.go b/vendor/gitee.com/openeuler/PilotGo-plugins/event/sdk/event.go new file mode 100644 index 0000000000000000000000000000000000000000..3a8bea5a6e817ab8c0eb9276305a329cdb13c415 --- /dev/null +++ b/vendor/gitee.com/openeuler/PilotGo-plugins/event/sdk/event.go @@ -0,0 +1,114 @@ +/* + * Copyright (c) KylinSoft Co., Ltd. 2024.All rights reserved. + * PilotGo-plugins licensed under the Mulan Permissive Software License, Version 2. + * See LICENSE file for more details. + * Author: zhanghan2021 + * Date: Wed Jul 24 10:02:04 2024 +0800 + */ +package sdk + +import ( + "encoding/json" + "errors" + "io" + "os" + "os/signal" + "syscall" + + "gitee.com/openeuler/PilotGo/sdk/common" + "gitee.com/openeuler/PilotGo/sdk/logger" + "gitee.com/openeuler/PilotGo/sdk/plugin/client" + "github.com/gin-gonic/gin" +) + +func UnPluginListenEventHandler() { + sig := make(chan os.Signal, 1) + signal.Notify(sig, syscall.SIGINT, syscall.SIGTERM, syscall.SIGQUIT) + go func() { + logger.Info("插件监听注册goroutine已启动") + defer logger.Info("插件取消监听goroutine退出") + for { + s := <-sig + switch s { + case syscall.SIGINT, syscall.SIGTERM, syscall.SIGQUIT: + logger.Info("接收到退出信号: %s", s.String()) + UnPluginListenEvent() + os.Exit(0) + default: + logger.Info("接收到未知信号: %s", s.String()) + } + } + }() +} +func RegisterEventHandlers(router *gin.Engine, c *client.Client) { + + api := router.Group("/plugin_manage/api/v1/") + { + api.POST("/event", eventHandler) + } + plugin_client = c + startEventProcessor(c) +} + +func eventHandler(c *gin.Context) { + j, err := io.ReadAll(c.Request.Body) // 接收数据 + if err != nil { + logger.Error("没获取到:%s", err.Error()) + return + } + var msg common.EventMessage + if err := json.Unmarshal(j, &msg); err != nil { + logger.Error("反序列化结果失败%s", err.Error()) + return + } + + ProcessEvent(&msg) +} + +func eventPluginServer() (string, error) { + plugins, err := plugin_client.GetPlugins() + if err != nil { + return "", err + } + + var eventServer string + for _, p := range plugins { + if p.Name == "event" { + eventServer = p.Url + break + } + } + + if eventServer == "" { + return "", errors.New("event plugin not found") + } + + return eventServer, nil +} + +func registerEventCallback(eventType int, callback common.EventCallback) { + plugin_client.EventCallbackMap[eventType] = callback +} + +func unregisterEventCallback(eventType int) { + delete(plugin_client.EventCallbackMap, eventType) +} + +func ProcessEvent(event *common.EventMessage) { + plugin_client.EventChan <- event +} + +func startEventProcessor(c *client.Client) { + go func(c *client.Client) { + for { + e := <-c.EventChan + + // TODO: process event message + cb, ok := c.EventCallbackMap[e.MessageType] + if ok { + cb(e) + } + } + }(c) + +} diff --git a/vendor/gitee.com/openeuler/PilotGo-plugins/event/sdk/listener.go b/vendor/gitee.com/openeuler/PilotGo-plugins/event/sdk/listener.go new file mode 100644 index 0000000000000000000000000000000000000000..8f9d73fc0ecd33622c55e981d31e4cc1af6ac406 --- /dev/null +++ 
b/vendor/gitee.com/openeuler/PilotGo-plugins/event/sdk/listener.go @@ -0,0 +1,182 @@ +/* + * Copyright (c) KylinSoft Co., Ltd. 2024.All rights reserved. + * PilotGo-plugins licensed under the Mulan Permissive Software License, Version 2. + * See LICENSE file for more details. + * Author: zhanghan2021 + * Date: Wed Jul 24 10:02:04 2024 +0800 + */ +package sdk + +import ( + "encoding/json" + "errors" + "net/http" + "strconv" + "strings" + + "gitee.com/openeuler/PilotGo/sdk/common" + "gitee.com/openeuler/PilotGo/sdk/plugin/client" + "gitee.com/openeuler/PilotGo/sdk/utils/httputils" +) + +var plugin_client *client.Client + +// 注册event事件监听 +func ListenEvent(eventTypes []int, callbacks common.EventCallback) error { + var eventtypes []string + for _, i := range eventTypes { + eventtypes = append(eventtypes, strconv.Itoa(i)) + } + + eventServer, err := eventPluginServer() + if err != nil { + return err + } + + url := eventServer + "/plugin/event/listener/register?eventTypes=" + strings.Join(eventtypes, ",") + r, err := httputils.Put(url, &httputils.Params{ + Body: plugin_client.PluginInfo, + }) + if err != nil { + return err + } + if r.StatusCode != http.StatusOK { + return errors.New("server process error:" + strconv.Itoa(r.StatusCode)) + } + + resp := &common.CommonResult{} + if err := json.Unmarshal(r.Body, resp); err != nil { + return err + } + if resp.Code != http.StatusOK { + return errors.New(resp.Message) + } + + data := &struct { + Status string `json:"status"` + Error string `json:"error"` + }{} + if err := resp.ParseData(data); err != nil { + return err + } + for _, eventType := range eventTypes { + registerEventCallback(eventType, callbacks) + } + return nil +} + +// 取消注册event事件监听 +func UnListenEvent(eventTypes []int) error { + var eventtypes []string + for _, i := range eventTypes { + eventtypes = append(eventtypes, strconv.Itoa(i)) + } + eventServer, err := eventPluginServer() + if err != nil { + return err + } + + url := eventServer + "/plugin/event/listener/unregister?eventTypes=" + strings.Join(eventtypes, ",") + r, err := httputils.Delete(url, &httputils.Params{ + Body: plugin_client.PluginInfo, + }) + if err != nil { + return err + } + if r.StatusCode != http.StatusOK { + return errors.New("server process error:" + strconv.Itoa(r.StatusCode)) + } + + resp := &common.CommonResult{} + if err := json.Unmarshal(r.Body, resp); err != nil { + return err + } + if resp.Code != http.StatusOK { + return errors.New(resp.Message) + } + + data := &struct { + Status string `json:"status"` + Error string `json:"error"` + }{} + if err := resp.ParseData(data); err != nil { + return err + } + + for _, eventType := range eventTypes { + unregisterEventCallback(eventType) + } + return nil +} + +// 插件服务退出,取消注册所有本插件的event事件监听 +func UnPluginListenEvent() error { + eventServer, err := eventPluginServer() + if err != nil { + return err + } + + url := eventServer + "/plugin/event/listener/unpluginRegister" + r, err := httputils.Delete(url, &httputils.Params{ + Body: plugin_client.PluginInfo, + }) + if err != nil { + return err + } + if r.StatusCode != http.StatusOK { + return errors.New("server process error:" + strconv.Itoa(r.StatusCode)) + } + + resp := &common.CommonResult{} + if err := json.Unmarshal(r.Body, resp); err != nil { + return err + } + if resp.Code != http.StatusOK { + return errors.New(resp.Message) + } + data := &struct { + EventType []int `json:"eventType"` + Status string `json:"status"` + }{} + if err := resp.ParseData(data); err != nil || data.Status != "ok" { + return err + } + for 
_, eventType := range data.EventType { + unregisterEventCallback(eventType) + } + return nil +} + +// 发布event事件 +func PublishEvent(msg common.EventMessage) error { + eventServer, err := eventPluginServer() + if err != nil { + return err + } + url := eventServer + "/plugin/event/publishEvent" + r, err := httputils.Put(url, &httputils.Params{ + Body: &msg, + }) + if err != nil { + return err + } + if r.StatusCode != http.StatusOK { + return errors.New("server process error:" + strconv.Itoa(r.StatusCode)) + } + + resp := &common.CommonResult{} + if err := json.Unmarshal(r.Body, resp); err != nil { + return err + } + if resp.Code != http.StatusOK { + return errors.New(resp.Message) + } + data := &struct { + Status string `json:"status"` + Error string `json:"error"` + }{} + if err := resp.ParseData(data); err != nil { + return err + } + return nil +} diff --git a/vendor/gitee.com/openeuler/PilotGo/sdk/common/common.go b/vendor/gitee.com/openeuler/PilotGo/sdk/common/common.go index 9670b2e152dbdb40957ed2bf9994904d92b02619..cedd84baeee314641a066bd8e1cdf13658bc889b 100644 --- a/vendor/gitee.com/openeuler/PilotGo/sdk/common/common.go +++ b/vendor/gitee.com/openeuler/PilotGo/sdk/common/common.go @@ -1,11 +1,39 @@ +/* + * Copyright (c) KylinSoft Co., Ltd. 2024.All rights reserved. + * PilotGo licensed under the Mulan Permissive Software License, Version 2. + * See LICENSE file for more details. + * Author: zhanghan2021 + * Date: Tue Jul 23 15:50:05 2024 +0800 + */ package common -import "encoding/json" +import ( + "encoding/json" + "time" +) + +type MessageData struct { + MsgType int `json:"msg_type_id"` + MessageType string `json:"msg_type"` + TimeStamp time.Time `json:"timestamp"` + Data interface{} `json:"data"` +} type EventMessage struct { - MessageType int - MessageData string + MessageType int `json:"msgType"` + MessageData string `json:"msgData"` } + +type EventCallback func(e *EventMessage) + +func (msgData *MessageData) ToMessageDataString() (string, error) { + data, err := json.Marshal(msgData) + if err != nil { + return "", err + } + return string(data), nil +} + type CommonResult struct { Code int `json:"code"` Message string `json:"msg"` @@ -15,5 +43,3 @@ type CommonResult struct { func (r *CommonResult) ParseData(d interface{}) error { return json.Unmarshal(r.Data, d) } - -type EventCallback func(e *EventMessage) diff --git a/vendor/gitee.com/openeuler/PilotGo/sdk/common/extention.go b/vendor/gitee.com/openeuler/PilotGo/sdk/common/extention.go index 90fe3522fc91dabc4d6eedf69e0b1bca027cd62c..d848919b1dc41369a5e7ec6d223e622a756f5569 100644 --- a/vendor/gitee.com/openeuler/PilotGo/sdk/common/extention.go +++ b/vendor/gitee.com/openeuler/PilotGo/sdk/common/extention.go @@ -1,3 +1,10 @@ +/* + * Copyright (c) KylinSoft Co., Ltd. 2024.All rights reserved. + * PilotGo licensed under the Mulan Permissive Software License, Version 2. + * See LICENSE file for more details. + * Author: Gzx1999 + * Date: Tue Nov 7 15:01:17 2023 +0800 + */ package common const ( diff --git a/vendor/gitee.com/openeuler/PilotGo/sdk/common/machine.go b/vendor/gitee.com/openeuler/PilotGo/sdk/common/machine.go index 2a8f7d0a6e2b18f51882ccefaa6471530d2d406a..387a6b48cd80c8f3210956b554e34f1165ed621e 100644 --- a/vendor/gitee.com/openeuler/PilotGo/sdk/common/machine.go +++ b/vendor/gitee.com/openeuler/PilotGo/sdk/common/machine.go @@ -1,3 +1,10 @@ +/* + * Copyright (c) KylinSoft Co., Ltd. 2024.All rights reserved. + * PilotGo licensed under the Mulan Permissive Software License, Version 2. 
+ * See LICENSE file for more details. + * Author: zhanghan2021 + * Date: Wed Sep 27 17:35:12 2023 +0800 + */ package common type MachineNode struct { diff --git a/vendor/gitee.com/openeuler/PilotGo/sdk/common/permission.go b/vendor/gitee.com/openeuler/PilotGo/sdk/common/permission.go index ebbc060b5e485364ea1acf2dc981db3014233f8f..7c0313de7f6dab129229a7b473587088010b28e7 100644 --- a/vendor/gitee.com/openeuler/PilotGo/sdk/common/permission.go +++ b/vendor/gitee.com/openeuler/PilotGo/sdk/common/permission.go @@ -1,3 +1,10 @@ +/* + * Copyright (c) KylinSoft Co., Ltd. 2024.All rights reserved. + * PilotGo licensed under the Mulan Permissive Software License, Version 2. + * See LICENSE file for more details. + * Author: wubijie + * Date: Thu Jan 11 20:24:26 2024 +0800 + */ package common type Permission struct { diff --git a/vendor/gitee.com/openeuler/PilotGo/sdk/common/script.go b/vendor/gitee.com/openeuler/PilotGo/sdk/common/script.go index 8c6144536e7ae5d19ea9033bcc791c5f9c1c7601..fbf5ae7316a17a2adcb74ff114eed31964c46657 100644 --- a/vendor/gitee.com/openeuler/PilotGo/sdk/common/script.go +++ b/vendor/gitee.com/openeuler/PilotGo/sdk/common/script.go @@ -1,3 +1,10 @@ +/* + * Copyright (c) KylinSoft Co., Ltd. 2024.All rights reserved. + * PilotGo licensed under the Mulan Permissive Software License, Version 2. + * See LICENSE file for more details. + * Author: Gzx1999 + * Date: Wed Oct 11 16:18:11 2023 +0800 + */ package common type AsyncCmdResult struct { diff --git a/vendor/gitee.com/openeuler/PilotGo/sdk/common/service.go b/vendor/gitee.com/openeuler/PilotGo/sdk/common/service.go index c5a8df8572bfae4dc6d7d4ff227471e731fa4540..ac4ad730c85965947fddb5e1c9dd7f7e1c14d8d1 100644 --- a/vendor/gitee.com/openeuler/PilotGo/sdk/common/service.go +++ b/vendor/gitee.com/openeuler/PilotGo/sdk/common/service.go @@ -1,58 +1,65 @@ -package common - -const ( - ServiceActiveStatusRunning = "running" - ServiceActiveStatusExited = "exited" - ServiceActiveStatusWaiting = "waiting" - ServiceActiveStatusInactive = "inactive" - ServiceActiveStatusUnknown = "unknown" - - ServiceLoadedStatusEnabled = "enabled" - ServiceLoadedStatusDisabled = "disabled" - ServiceLoadedStatusStatic = "static" - ServiceLoadedStatusMask = "mask" - ServiceLoadedStatusUnknown = "unknown" -) - -const ( - ServiceUnit = "service" - SocketUnit = "socket" - TargetUnit = "target" - MountUnit = "mount" - AutomountUnit = "automount" - PathUnit = "path" - TimeUnit = "time" -) - -type ServiceResult struct { - MachineUUID string - MachineIP string - ServiceSample ServiceInfo -} - -type ServiceInfo struct { - ServiceName string - UnitName string - UnitType string - ServicePath string //配置文件放置的目录 - ServiceAfter string //在什么服务启动后启动 - ServiceBefore string //在什么服务启动前启动 - ServiceRequires string //需要的daemon - ServiceWants string //与requires相反 - ServiceEnvironmentFile string //启动脚本的环境配置文件 - ServiceExectStart string //实际执行daemon的指令或脚本程序 - ServiceActiveStatus string - ServiceLoadedStatus string - StartTime string -} - -type ServiceStruct struct { - Batch *Batch `json:"batch"` - ServiceName string `json:"service"` -} - -type Result struct { - Code int `json:"code"` - Mseeage string `json:"msg"` - Data []*ServiceResult `json:"data"` -} +/* + * Copyright (c) KylinSoft Co., Ltd. 2024.All rights reserved. + * PilotGo licensed under the Mulan Permissive Software License, Version 2. + * See LICENSE file for more details. 
+ * Author: zhanghan2021 + * Date: Wed Sep 27 17:35:12 2023 +0800 + */ +package common + +const ( + ServiceActiveStatusRunning = "running" + ServiceActiveStatusExited = "exited" + ServiceActiveStatusWaiting = "waiting" + ServiceActiveStatusInactive = "inactive" + ServiceActiveStatusUnknown = "unknown" + + ServiceLoadedStatusEnabled = "enabled" + ServiceLoadedStatusDisabled = "disabled" + ServiceLoadedStatusStatic = "static" + ServiceLoadedStatusMask = "mask" + ServiceLoadedStatusUnknown = "unknown" +) + +const ( + ServiceUnit = "service" + SocketUnit = "socket" + TargetUnit = "target" + MountUnit = "mount" + AutomountUnit = "automount" + PathUnit = "path" + TimeUnit = "time" +) + +type ServiceResult struct { + MachineUUID string + MachineIP string + ServiceSample ServiceInfo +} + +type ServiceInfo struct { + ServiceName string + UnitName string + UnitType string + ServicePath string //配置文件放置的目录 + ServiceAfter string //在什么服务启动后启动 + ServiceBefore string //在什么服务启动前启动 + ServiceRequires string //需要的daemon + ServiceWants string //与requires相反 + ServiceEnvironmentFile string //启动脚本的环境配置文件 + ServiceExectStart string //实际执行daemon的指令或脚本程序 + ServiceActiveStatus string + ServiceLoadedStatus string + StartTime string +} + +type ServiceStruct struct { + Batch *Batch `json:"batch"` + ServiceName string `json:"service"` +} + +type Result struct { + Code int `json:"code"` + Mseeage string `json:"msg"` + Data []*ServiceResult `json:"data"` +} diff --git a/vendor/gitee.com/openeuler/PilotGo/sdk/common/status.go b/vendor/gitee.com/openeuler/PilotGo/sdk/common/status.go index 5d0cd8c3335befaa745710983b2ee19635be8336..2e6cef40b203d8d09caee6f378e492d7831e4b13 100644 --- a/vendor/gitee.com/openeuler/PilotGo/sdk/common/status.go +++ b/vendor/gitee.com/openeuler/PilotGo/sdk/common/status.go @@ -1,3 +1,10 @@ +/* + * Copyright (c) KylinSoft Co., Ltd. 2024.All rights reserved. + * PilotGo licensed under the Mulan Permissive Software License, Version 2. + * See LICENSE file for more details. + * Author: zhanghan2021 + * Date: Wed Sep 27 17:35:12 2023 +0800 + */ // 提供公共数据结构定义 package common diff --git a/vendor/gitee.com/openeuler/PilotGo/sdk/common/tag.go b/vendor/gitee.com/openeuler/PilotGo/sdk/common/tag.go index dab5e54e80dada87daeb21e496225366a3c406e9..5f8de3ab85ead78c4e40a6ed54d12e8d51039f2e 100644 --- a/vendor/gitee.com/openeuler/PilotGo/sdk/common/tag.go +++ b/vendor/gitee.com/openeuler/PilotGo/sdk/common/tag.go @@ -1,3 +1,10 @@ +/* + * Copyright (c) KylinSoft Co., Ltd. 2024.All rights reserved. + * PilotGo licensed under the Mulan Permissive Software License, Version 2. + * See LICENSE file for more details. + * Author: wubijie + * Date: Thu Nov 2 16:38:53 2023 +0800 + */ package common const ( diff --git a/vendor/gitee.com/openeuler/PilotGo/sdk/logger/logger.go b/vendor/gitee.com/openeuler/PilotGo/sdk/logger/logger.go index aa9b00a16ede0556e94dbc7dd26cd41e25e18b79..aad3575be8844055f0ef4f2ce6148013cb406253 100644 --- a/vendor/gitee.com/openeuler/PilotGo/sdk/logger/logger.go +++ b/vendor/gitee.com/openeuler/PilotGo/sdk/logger/logger.go @@ -1,17 +1,10 @@ -/****************************************************************************** - * Copyright (c) KylinSoft Co., Ltd.2021-2022. All rights reserved. - * PilotGo is licensed under the Mulan PSL v2. - * You can use this software accodring to the terms and conditions of the Mulan PSL v2. 
- * You may obtain a copy of Mulan PSL v2 at: - * http://license.coscl.org.cn/MulanPSL2 - * THIS SOFTWARE IS PROVIDED ON AN 'AS IS' BASIS, WITHOUT WARRANTIES OF ANY KIND, - * EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE. - * See the Mulan PSL v2 for more details. - * Author: yangzhao1 - * Date: 2022-03-01 09:59:30 - * LastEditTime: 2022-04-05 11:37:16 - * Description: provide agent log manager of pilotgo - ******************************************************************************/ +/* + * Copyright (c) KylinSoft Co., Ltd. 2024.All rights reserved. + * PilotGo licensed under the Mulan Permissive Software License, Version 2. + * See LICENSE file for more details. + * Author: zhanghan2021 + * Date: Wed Sep 27 17:35:12 2023 +0800 + */ package logger import ( @@ -120,41 +113,45 @@ func Fatal(format string, args ...interface{}) { logrus.Fatalf(format, args...) } -func RequestLogger() gin.HandlerFunc { +func RequestLogger(_skipPaths []string) gin.HandlerFunc { + var skip map[string]struct{} + if len(_skipPaths) > 0 { + skip = make(map[string]struct{}, len(_skipPaths)) + + for _, path := range _skipPaths { + skip[path] = struct{}{} + } + } return func(c *gin.Context) { - // 开始时间 - startTime := time.Now() + start := time.Now() + path := c.Request.URL.Path + raw := c.Request.URL.RawQuery - // 处理请求 c.Next() - // 结束时间 - endTime := time.Now() - - // 执行时间 - latencyTime := endTime.Sub(startTime) - - // 请求方式 - reqMethod := c.Request.Method - - // 请求路由 - reqUri := c.Request.RequestURI - - // 状态码 - statusCode := c.Writer.Status() - - // 请求IP - clientIP := c.ClientIP() - - // 日志格式 - if reqUri != "/api/v1/pluginapi/heartbeat" && reqUri != "/plugin/prometheus/target" { - Debug("status_code:%d latency_time:%s client_ip:%s req_method:%s req_uri:%s", + if _, ok := skip[path]; !ok { + endTime := time.Now() + latency := endTime.Sub(start) + method := c.Request.Method + statusCode := c.Writer.Status() + clientIP := c.ClientIP() + errorMessage := c.Errors.ByType(gin.ErrorTypePrivate).String() + + if raw != "" { + path = path + "?" + raw + } + if latency > time.Minute { + latency = latency.Truncate(time.Second) + } + + Debug("%s status_code:%3d latency_time:%-13v client_ip:%-15s method:%-7s req_uri:%#v\n%s", + start.Format("2006-01-02 15:04:05"), statusCode, - latencyTime, + latency, clientIP, - reqMethod, - reqUri, - ) + method, + path, + errorMessage) } } } diff --git a/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/batch.go b/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/batch.go index 088761aab7d49d209415d1e3db0e7036f718de7f..70f1f99af2eb036a5ef9cc12b2f5afdf95fcc3dc 100644 --- a/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/batch.go +++ b/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/batch.go @@ -1,3 +1,10 @@ +/* + * Copyright (c) KylinSoft Co., Ltd. 2024.All rights reserved. + * PilotGo licensed under the Mulan Permissive Software License, Version 2. + * See LICENSE file for more details. 
+ * Author: zhanghan2021 + * Date: Fri Jan 19 11:08:30 2024 +0800 + */ package client import ( diff --git a/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/bindsync.go b/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/bindsync.go index f73f97cd488d06e9d837aedd7f3b631182352508..83145caa4361716f06a51d5cd1894d9221fae895 100644 --- a/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/bindsync.go +++ b/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/bindsync.go @@ -1,3 +1,10 @@ +/* + * Copyright (c) KylinSoft Co., Ltd. 2024.All rights reserved. + * PilotGo licensed under the Mulan Permissive Software License, Version 2. + * See LICENSE file for more details. + * Author: wubijie + * Date: Wed Dec 27 19:38:17 2023 +0800 + */ package client import ( diff --git a/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/client.go b/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/client.go index 6bf95ba6eb5cbd1432736e62cacb56c99563cbb8..dcae6b878882762a634396cf9558ea866257821f 100644 --- a/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/client.go +++ b/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/client.go @@ -1,3 +1,10 @@ +/* + * Copyright (c) KylinSoft Co., Ltd. 2024.All rights reserved. + * PilotGo licensed under the Mulan Permissive Software License, Version 2. + * See LICENSE file for more details. + * Author: zhanghan2021 + * Date: Wed Sep 27 17:35:12 2023 +0800 + */ package client import ( diff --git a/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/config.go b/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/config.go index 643bb9bac0c527c14200e24d6919daf89417bbc9..41add3e37a4b5520f18a77919a44bfa4de4ab04f 100644 --- a/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/config.go +++ b/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/config.go @@ -1,3 +1,10 @@ +/* + * Copyright (c) KylinSoft Co., Ltd. 2024.All rights reserved. + * PilotGo licensed under the Mulan Permissive Software License, Version 2. + * See LICENSE file for more details. + * Author: zhanghan2021 + * Date: Wed Sep 27 17:35:12 2023 +0800 + */ package client import ( diff --git a/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/extention.go b/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/extention.go index a0d233efcc79d4531f37b35a1270315f67882f53..1f7955035ebd2c3514e1fad54e01576fcac27a09 100644 --- a/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/extention.go +++ b/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/extention.go @@ -1,3 +1,10 @@ +/* + * Copyright (c) KylinSoft Co., Ltd. 2024.All rights reserved. + * PilotGo licensed under the Mulan Permissive Software License, Version 2. + * See LICENSE file for more details. + * Author: Gzx1999 + * Date: Tue Nov 7 15:01:17 2023 +0800 + */ package client import ( diff --git a/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/fileservice.go b/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/fileservice.go index fe67a8f37518e9244a651e4686a1d38153a9648c..3fe24a816e0537968ed1346189eb39e98544a0af 100644 --- a/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/fileservice.go +++ b/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/fileservice.go @@ -1,3 +1,10 @@ +/* + * Copyright (c) KylinSoft Co., Ltd. 2024.All rights reserved. + * PilotGo licensed under the Mulan Permissive Software License, Version 2. + * See LICENSE file for more details. 
+ * Author: zhanghan2021 + * Date: Tue Mar 12 15:33:09 2024 +0800 + */ package client import ( diff --git a/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/handler.go b/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/handler.go index 109ffe061202a2cc13343589328c49b9e9cc14d0..d8e1389a1f5fc70c84e5eb149236ea9c39581f33 100644 --- a/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/handler.go +++ b/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/handler.go @@ -1,3 +1,10 @@ +/* + * Copyright (c) KylinSoft Co., Ltd. 2024.All rights reserved. + * PilotGo licensed under the Mulan Permissive Software License, Version 2. + * See LICENSE file for more details. + * Author: zhanghan2021 + * Date: Wed Sep 27 17:35:12 2023 +0800 + */ package client import ( diff --git a/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/heartbeat.go b/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/heartbeat.go index fa1fb9ce81f7a12f5afdea91406b783284d6bf1b..f4aedd3fb935c943b76e25c2c1ffa2041ec598af 100644 --- a/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/heartbeat.go +++ b/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/heartbeat.go @@ -1,3 +1,10 @@ +/* + * Copyright (c) KylinSoft Co., Ltd. 2024.All rights reserved. + * PilotGo licensed under the Mulan Permissive Software License, Version 2. + * See LICENSE file for more details. + * Author: zhanghan2021 + * Date: Mon Dec 18 10:57:09 2023 +0800 + */ package client import ( diff --git a/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/machine.go b/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/machine.go index 94043cfcfc3e9b22a4954c01c263ae6167738d9f..85bc2294a9d7c74967750ee19357de2d56dc65aa 100644 --- a/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/machine.go +++ b/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/machine.go @@ -1,3 +1,10 @@ +/* + * Copyright (c) KylinSoft Co., Ltd. 2024.All rights reserved. + * PilotGo licensed under the Mulan Permissive Software License, Version 2. + * See LICENSE file for more details. + * Author: zhanghan2021 + * Date: Wed Sep 27 17:35:12 2023 +0800 + */ package client import ( diff --git a/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/permission.go b/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/permission.go index 155b66e6463ee8c8320fbe6342d61456d45b937b..10352de3b1ec4769d199042f46a6debe87b96ab8 100644 --- a/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/permission.go +++ b/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/permission.go @@ -1,3 +1,10 @@ +/* + * Copyright (c) KylinSoft Co., Ltd. 2024.All rights reserved. + * PilotGo licensed under the Mulan Permissive Software License, Version 2. + * See LICENSE file for more details. + * Author: wubijie + * Date: Thu Jan 11 20:24:26 2024 +0800 + */ package client import ( diff --git a/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/plugin.go b/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/plugin.go index 9466bf7559c4468d1102b7c2144db6009f623028..299ab8f1b4a9efe4249644d8019c5cd3b3e68359 100644 --- a/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/plugin.go +++ b/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/plugin.go @@ -1,3 +1,10 @@ +/* + * Copyright (c) KylinSoft Co., Ltd. 2024.All rights reserved. + * PilotGo licensed under the Mulan Permissive Software License, Version 2. + * See LICENSE file for more details. 
+ * Author: zhanghan2021 + * Date: Wed Sep 27 17:35:12 2023 +0800 + */ package client import ( diff --git a/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/script.go b/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/script.go index 2d28745e89f5bd2860e71201ec48976062f9d4b1..ccf9776b230ada7f7ef7d0ee88c5f1cf0287378a 100644 --- a/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/script.go +++ b/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/script.go @@ -1,3 +1,10 @@ +/* + * Copyright (c) KylinSoft Co., Ltd. 2024.All rights reserved. + * PilotGo licensed under the Mulan Permissive Software License, Version 2. + * See LICENSE file for more details. + * Author: zhanghan2021 + * Date: Wed Sep 27 17:35:12 2023 +0800 + */ package client import ( diff --git a/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/service.go b/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/service.go index 29597123325bcb098ffe8740891162296ecea300..a2fe2c661aaf8f757ee66a2ba96f1d79dce2e338 100644 --- a/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/service.go +++ b/vendor/gitee.com/openeuler/PilotGo/sdk/plugin/client/service.go @@ -1,3 +1,10 @@ +/* + * Copyright (c) KylinSoft Co., Ltd. 2024.All rights reserved. + * PilotGo licensed under the Mulan Permissive Software License, Version 2. + * See LICENSE file for more details. + * Author: zhanghan2021 + * Date: Wed Sep 27 17:35:12 2023 +0800 + */ package client import ( diff --git a/vendor/gitee.com/openeuler/PilotGo/sdk/response/response.go b/vendor/gitee.com/openeuler/PilotGo/sdk/response/response.go index ac0d707a0415f384a1fcc2c1ba610d3230c6aa23..04d5d6f281480d2b6a8cf6aa64be58bd3b9a8eac 100644 --- a/vendor/gitee.com/openeuler/PilotGo/sdk/response/response.go +++ b/vendor/gitee.com/openeuler/PilotGo/sdk/response/response.go @@ -1,3 +1,10 @@ +/* + * Copyright (c) KylinSoft Co., Ltd. 2024.All rights reserved. + * PilotGo licensed under the Mulan Permissive Software License, Version 2. + * See LICENSE file for more details. + * Author: zhanghan2021 + * Date: Wed Sep 27 17:35:12 2023 +0800 + */ package response import ( diff --git a/vendor/gitee.com/openeuler/PilotGo/sdk/utils/config/config.go b/vendor/gitee.com/openeuler/PilotGo/sdk/utils/config/config.go index 01a467a0c944a355ec0b75ef5828c292e40f9ef2..37573e366735d415f0ff827bf6c5c7f066abb087 100644 --- a/vendor/gitee.com/openeuler/PilotGo/sdk/utils/config/config.go +++ b/vendor/gitee.com/openeuler/PilotGo/sdk/utils/config/config.go @@ -1,17 +1,10 @@ -/****************************************************************************** - * Copyright (c) KylinSoft Co., Ltd.2021-2022. All rights reserved. - * PilotGo is licensed under the Mulan PSL v2. - * You can use this software accodring to the terms and conditions of the Mulan PSL v2. - * You may obtain a copy of Mulan PSL v2 at: - *     http://license.coscl.org.cn/MulanPSL2 - * THIS SOFTWARE IS PROVIDED ON AN 'AS IS' BASIS, WITHOUT WARRANTIES OF ANY KIND, - * EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE. - * See the Mulan PSL v2 for more details. - * Author: zhanghan - * Date: 2021-11-18 13:03:16 - * LastEditTime: 2022-04-06 14:22:15 - * Description: provide configure yaml functions. - ******************************************************************************/ +/* + * Copyright (c) KylinSoft Co., Ltd. 2024.All rights reserved. + * PilotGo licensed under the Mulan Permissive Software License, Version 2. + * See LICENSE file for more details. 
+ * Author: wubijie + * Date: Thu Oct 19 15:11:20 2023 +0800 + */ package config import ( diff --git a/vendor/gitee.com/openeuler/PilotGo/sdk/utils/httputils/judgeProtocol.go b/vendor/gitee.com/openeuler/PilotGo/sdk/utils/httputils/judgeProtocol.go index 5d3249364384293bfe4a093ba37ab450f6fe5b48..8b1c861022a7e0f0eb91021cc766c15ddea470c4 100644 --- a/vendor/gitee.com/openeuler/PilotGo/sdk/utils/httputils/judgeProtocol.go +++ b/vendor/gitee.com/openeuler/PilotGo/sdk/utils/httputils/judgeProtocol.go @@ -1,3 +1,10 @@ +/* + * Copyright (c) KylinSoft Co., Ltd. 2024.All rights reserved. + * PilotGo licensed under the Mulan Permissive Software License, Version 2. + * See LICENSE file for more details. + * Author: Wangjunqi123 + * Date: Wed Jun 12 14:00:31 2024 +0800 + */ package httputils import ( @@ -17,7 +24,7 @@ func ServerIsHttp(rawurl string) (bool, error) { } url_string := fmt.Sprintf("http://%s", net.JoinHostPort(url.Hostname(), url.Port())) - req, err := http.NewRequest(http.MethodGet, url_string, nil) + req, err := http.NewRequest("GET", url_string, nil) if err != nil { return false, err } diff --git a/vendor/gitee.com/openeuler/PilotGo/sdk/utils/httputils/request.go b/vendor/gitee.com/openeuler/PilotGo/sdk/utils/httputils/request.go index c5b7689044cea8c2b109d30f057fda13c6a3c537..0f88478161c70289663638c0f43583f8edad1d7e 100644 --- a/vendor/gitee.com/openeuler/PilotGo/sdk/utils/httputils/request.go +++ b/vendor/gitee.com/openeuler/PilotGo/sdk/utils/httputils/request.go @@ -1,3 +1,10 @@ +/* + * Copyright (c) KylinSoft Co., Ltd. 2024.All rights reserved. + * PilotGo licensed under the Mulan Permissive Software License, Version 2. + * See LICENSE file for more details. + * Author: zhanghan2021 + * Date: Wed Sep 27 17:35:12 2023 +0800 + */ package httputils import ( diff --git a/vendor/github.com/bytedance/sonic/.gitignore b/vendor/github.com/bytedance/sonic/.gitignore index 0d8844705b1b556fd90e3d9079a1e54655ff738a..fa60f43a290700225e005281621ecff804425a71 100644 --- a/vendor/github.com/bytedance/sonic/.gitignore +++ b/vendor/github.com/bytedance/sonic/.gitignore @@ -49,4 +49,7 @@ ast/bench.sh !testdata/*.json.gz fuzz/testdata -*__debug_bin \ No newline at end of file +*__debug_bin* +*pprof +*coverage.txt +tools/venv/* \ No newline at end of file diff --git a/vendor/github.com/bytedance/sonic/.gitmodules b/vendor/github.com/bytedance/sonic/.gitmodules index b8d11c9215af700d1a0622528e8e7dc823286985..ea84b991a976cca11cc91d9c185485b6daf14c65 100644 --- a/vendor/github.com/bytedance/sonic/.gitmodules +++ b/vendor/github.com/bytedance/sonic/.gitmodules @@ -1,3 +1,6 @@ -[submodule "tools/asm2asm"] +[submodule "cloudwego"] path = tools/asm2asm - url = https://github.com/chenzhuoyu/asm2asm + url = https://github.com/cloudwego/asm2asm.git +[submodule "tools/simde"] + path = tools/simde + url = https://github.com/simd-everywhere/simde.git diff --git a/vendor/github.com/bytedance/sonic/Makefile b/vendor/github.com/bytedance/sonic/Makefile deleted file mode 100644 index 8cc0acf15cd931139af73e128aae3962033cae53..0000000000000000000000000000000000000000 --- a/vendor/github.com/bytedance/sonic/Makefile +++ /dev/null @@ -1,112 +0,0 @@ -# -# Copyright 2021 ByteDance Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -ARCH := avx avx2 sse -TMP_DIR := output -OUT_DIR := internal/native -SRC_FILE := native/native.c - -CPU_avx := amd64 -CPU_avx2 := amd64 -CPU_sse := amd64 - -TMPL_avx := fastint_amd64_test fastfloat_amd64_test native_amd64_test native_export_amd64 -TMPL_avx2 := fastint_amd64_test fastfloat_amd64_test native_amd64_test native_export_amd64 -TMPL_sse := fastint_amd64_test fastfloat_amd64_test native_amd64_test native_export_amd64 - -CFLAGS_avx := -msse -mno-sse4 -mavx -mpclmul -mno-avx2 -DUSE_AVX=1 -DUSE_AVX2=0 -CFLAGS_avx2 := -msse -mno-sse4 -mavx -mpclmul -mavx2 -DUSE_AVX=1 -DUSE_AVX2=1 -CFLAGS_sse := -msse -mno-sse4 -mno-avx -mno-avx2 -mpclmul - -CC_amd64 := clang -ASM2ASM_amd64 := tools/asm2asm/asm2asm.py - -CFLAGS := -mno-red-zone -CFLAGS += -target x86_64-apple-macos11 -CFLAGS += -fno-asynchronous-unwind-tables -CFLAGS += -fno-builtin -CFLAGS += -fno-exceptions -CFLAGS += -fno-rtti -CFLAGS += -fno-stack-protector -CFLAGS += -nostdlib -CFLAGS += -O3 -CFLAGS += -Wall -Werror - -NATIVE_SRC := $(wildcard native/*.h) -NATIVE_SRC += $(wildcard native/*.c) - -.PHONY: all clean ${ARCH} - -define build_tmpl - $(eval @arch := $(1)) - $(eval @tmpl := $(2)) - $(eval @dest := $(3)) - -${@dest}: ${@tmpl} - mkdir -p $(dir ${@dest}) - echo '// Code generated by Makefile, DO NOT EDIT.' > ${@dest} - echo >> ${@dest} - sed -e 's/{{PACKAGE}}/${@arch}/g' ${@tmpl} >> ${@dest} -endef - -define build_arch - $(eval @cpu := $(value CPU_$(1))) - $(eval @deps := $(foreach tmpl,$(value TMPL_$(1)),${OUT_DIR}/$(1)/${tmpl}.go)) - $(eval @asmin := ${TMP_DIR}/$(1)/native.s) - $(eval @asmout := ${OUT_DIR}/$(1)/native_${@cpu}.s) - $(eval @stubin := ${OUT_DIR}/native_${@cpu}.tmpl) - $(eval @stubout := ${OUT_DIR}/$(1)/native_${@cpu}.go) - -$(1): ${@asmout} ${@deps} - -${@asmout}: ${@stubout} ${NATIVE_SRC} - mkdir -p ${TMP_DIR}/$(1) - $${CC_${@cpu}} $${CFLAGS} $${CFLAGS_$(1)} -S -o ${TMP_DIR}/$(1)/native.s ${SRC_FILE} - python3 $${ASM2ASM_${@cpu}} ${@asmout} ${TMP_DIR}/$(1)/native.s - asmfmt -w ${@asmout} - -$(eval $(call \ - build_tmpl, \ - $(1), \ - ${@stubin}, \ - ${@stubout} \ -)) - -$(foreach \ - tmpl, \ - $(value TMPL_$(1)), \ - $(eval $(call \ - build_tmpl, \ - $(1), \ - ${OUT_DIR}/${tmpl}.tmpl, \ - ${OUT_DIR}/$(1)/${tmpl}.go \ - )) \ -) -endef - -all: ${ARCH} - -clean: - for arch in ${ARCH}; do \ - rm -vfr ${TMP_DIR}/$${arch}; \ - rm -vfr ${OUT_DIR}/$${arch}; \ - done - -$(foreach \ - arch, \ - ${ARCH}, \ - $(eval $(call build_arch,${arch})) \ -) diff --git a/vendor/github.com/bytedance/sonic/README.md b/vendor/github.com/bytedance/sonic/README.md index cdb32a57de487d4178dc63b75be3fffb9834dc2d..5f609b1c340afa07e2325b6c4b860d1ca3df8efe 100644 --- a/vendor/github.com/bytedance/sonic/README.md +++ b/vendor/github.com/bytedance/sonic/README.md @@ -5,18 +5,27 @@ English | [中文](README_ZH_CN.md) A blazingly fast JSON serializing & deserializing library, accelerated by JIT (just-in-time compiling) and SIMD (single-instruction-multiple-data). 
 ## Requirement
-- Go 1.15~1.20
-- Linux/MacOS/Windows
-- Amd64 ARCH
+
+- Go: 1.17~1.23
+- OS: Linux / MacOS / Windows
+- CPU: AMD64 / ARM64 (requires Go 1.20 or above)
 
 ## Features
+
 - Runtime object binding without code generation
 - Complete APIs for JSON value manipulation
 - Fast, fast, fast!
+
+## APIs
+
+See [go.dev](https://pkg.go.dev/github.com/bytedance/sonic)
+
 ## Benchmarks
+
 For **all sizes** of json and **all scenarios** of usage, **Sonic performs best**.
+
 - [Medium](https://github.com/bytedance/sonic/blob/main/decoder/testdata_test.go#L19) (13KB, 300+ key, 6 layers)
+
 ```powershell
 goversion: 1.17.1
 goos: darwin
@@ -76,15 +85,21 @@ BenchmarkSetOne_Jsoniter-16 79475 ns/op 163.8
 BenchmarkSetOne_Parallel_Sonic-16 850.9 ns/op 15305.31 MB/s 1584 B/op 17 allocs/op
 BenchmarkSetOne_Parallel_Sjson-16 18194 ns/op 715.77 MB/s 52247 B/op 9 allocs/op
 BenchmarkSetOne_Parallel_Jsoniter-16 33560 ns/op 388.05 MB/s 45892 B/op 964 allocs/op
+BenchmarkLoadNode/LoadAll()-16 11384 ns/op 1143.93 MB/s 6307 B/op 25 allocs/op
+BenchmarkLoadNode_Parallel/LoadAll()-16 5493 ns/op 2370.68 MB/s 7145 B/op 25 allocs/op
+BenchmarkLoadNode/Interface()-16 17722 ns/op 734.85 MB/s 13323 B/op 88 allocs/op
+BenchmarkLoadNode_Parallel/Interface()-16 10330 ns/op 1260.70 MB/s 15178 B/op 88 allocs/op
 ```
+
 - [Small](https://github.com/bytedance/sonic/blob/main/testdata/small.go) (400B, 11 keys, 3 layers)
 ![small benchmarks](./docs/imgs/bench-small.png)
 - [Large](https://github.com/bytedance/sonic/blob/main/testdata/twitter.json) (635KB, 10000+ key, 6 layers)
 ![large benchmarks](./docs/imgs/bench-large.png)
 
-See [bench.sh](https://github.com/bytedance/sonic/blob/main/bench.sh) for benchmark codes.
+See [bench.sh](https://github.com/bytedance/sonic/blob/main/scripts/bench.sh) for the benchmark code.
 
 ## How it works
+
 See [INTRODUCTION.md](./docs/INTRODUCTION.md).
 
 ## Usage
@@ -92,6 +107,7 @@ See [INTRODUCTION.md](./docs/INTRODUCTION.md).
 ### Marshal/Unmarshal
 
 Default behaviors are mostly consistent with `encoding/json`, except HTML escaping form (see [Escape HTML](https://github.com/bytedance/sonic/blob/main/README.md#escape-html)) and `SortKeys` feature (optional support see [Sort Keys](https://github.com/bytedance/sonic/blob/main/README.md#sort-keys)) that is **NOT** in conformity to [RFC8259](https://datatracker.ietf.org/doc/html/rfc8259).
+
 ```go
 import "github.com/bytedance/sonic"
 
@@ -103,8 +119,11 @@ err := sonic.Unmarshal(output, &data)
 ```
 
 ### Streaming IO
-Sonic supports decoding json from `io.Reader` or encoding objects into `io.`Writer`, aims at handling multiple values as well as reducing memory consumption.
+
+Sonic supports decoding json from `io.Reader` or encoding objects into `io.Writer`, aiming at handling multiple values as well as reducing memory consumption.
+
 - encoder
+
 ```go
 var o1 = map[string]interface{}{
     "a": "b",
@@ -119,7 +138,9 @@ fmt.Println(w.String())
 // {"a":"b"}
 // 1
 ```
+
 - decoder
+
 ```go
 var o = map[string]interface{}{}
 var r = strings.NewReader(`{"a":"b"}{"1":"2"}`)
@@ -132,6 +153,7 @@ fmt.Printf("%+v", o)
 ```
 
 ### Use Number/Use Int64
+
 ```go
 import "github.com/bytedance/sonic/decoder"
 
@@ -160,7 +182,9 @@ fm := root.Interface().(float64) // jn == jm
 ```
 
 ### Sort Keys
+
 On account of the performance loss from sorting (roughly 10%), sonic doesn't enable this feature by default.
If your component depends on it to work (like [zstd](https://github.com/facebook/zstd)), Use it like this:
+
 ```go
 import "github.com/bytedance/sonic"
 import "github.com/bytedance/sonic/encoder"
@@ -173,19 +197,26 @@ v, err := encoder.Encode(m, encoder.SortMapKeys)
 var root := sonic.Get(JSON)
 err := root.SortKeys()
 ```
+
 ### Escape HTML
+
 On account of the performance loss (roughly 15%), sonic doesn't enable this feature by default. You can use `encoder.EscapeHTML` option to open this feature (align with `encoding/json.HTMLEscape`).
+
 ```go
 import "github.com/bytedance/sonic"
 
 v := map[string]string{"&&":"<>"}
 ret, err := Encode(v, EscapeHTML) // ret == `{"\u0026\u0026":{"X":"\u003c\u003e"}}`
 ```
+
 ### Compact Format
+
 Sonic encodes primitive objects (struct/map...) as compact-format JSON by default, except marshaling `json.RawMessage` or `json.Marshaler`: sonic ensures validating their output JSON but **DONOT** compacting them for performance concerns. We provide the option `encoder.CompactMarshaler` to add compacting process.
 
 ### Print Error
+
 If there invalid syntax in input JSON, sonic will return `decoder.SyntaxError`, which supports pretty-printing of error position
+
 ```go
 import "github.com/bytedance/sonic"
 import "github.com/bytedance/sonic/decoder"
@@ -211,7 +242,9 @@ if err != nil {
 ```
 
 #### Mismatched Types [Sonic v1.6.0]
+
 If there a **mismatch-typed** value for a given key, sonic will report `decoder.MismatchTypeError` (if there are many, report the last one), but still skip wrong the value and keep decoding next JSON.
+
 ```go
 import "github.com/bytedance/sonic"
 import "github.com/bytedance/sonic/decoder"
@@ -224,10 +257,15 @@ err := UnmarshalString(`{"A":"1","B":1}`, &data)
 println(err.Error()) // Mismatch type int with value string "at index 5: mismatched type with value\n\n\t{\"A\":\"1\",\"B\":1}\n\t.....^.........\n"
 fmt.Printf("%+v", data) // {A:0 B:1}
 ```
+
 ### Ast.Node
+
 Sonic/ast.Node is a completely self-contained AST for JSON. It implements serialization and deserialization both and provides robust APIs for obtaining and modification of generic data.
+
 #### Get/Index
+
 Search partial JSON by given paths, which must be non-negative integer or string, or nil
+
 ```go
 import "github.com/bytedance/sonic"
 
@@ -241,10 +279,29 @@ raw := root.Raw() // == string(input)
 root, err := sonic.Get(input, "key1", 1, "key2")
 sub := root.Get("key3").Index(2).Int64() // == 3
 ```
+
 **Tip**: since `Index()` uses offset to locate data, which is much faster than scanning like `Get()`, we suggest you use it as much as possible. And sonic also provides another API `IndexOrGet()` to underlying use offset as well as ensure the key is matched.
 
+#### SearchOption
+
+`Searcher` provides some options for users with different needs (a usage sketch follows the option list):
+
+```go
+opts := ast.SearchOption{ CopyReturn: true ... }
+val, err := sonic.GetWithOptions(JSON, opts, "key")
+```
+
+- CopyReturn
+Instructs the searcher to copy the result JSON string instead of referencing it from the input. This can help reduce memory usage if you cache the results.
+- ConcurrentRead
+Since `ast.Node` uses a lazy-load design, it doesn't support concurrent reads by default. If you want to read it concurrently, specify this option.
+- ValidateJSON
+Instructs the searcher to validate the entire JSON. This option is enabled by default, which slows down the search a little.
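+
+For illustration, a minimal, self-contained sketch of combining these options with `GetWithOptions()` (the sample JSON is made up; note that the options struct is declared as `ast.SearchOptions` in this module's `api.go`):
+
+```go
+package main
+
+import (
+	"fmt"
+
+	"github.com/bytedance/sonic"
+	"github.com/bytedance/sonic/ast"
+)
+
+func main() {
+	src := []byte(`{"user":{"name":"alice","tags":[1,2,3]}}`)
+
+	// Copy the located value out of src so the source buffer can be freed,
+	// and keep full-document validation enabled (the default).
+	opts := ast.SearchOptions{
+		CopyReturn:   true,
+		ValidateJSON: true,
+	}
+	node, err := sonic.GetWithOptions(src, opts, "user", "name")
+	if err != nil {
+		panic(err)
+	}
+	name, _ := node.String()
+	fmt.Println(name) // alice
+}
+```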
+
 #### Set/Unset
+
 Modify the json content by Set()/Unset()
+
 ```go
 import "github.com/bytedance/sonic"
 
@@ -261,7 +318,9 @@ println(root.Get("key4").Check()) // "value not exist"
 ```
 
 #### Serialize
+
 To encode `ast.Node` as json, use `MarshalJson()` or `json.Marshal()` (MUST pass the node's pointer)
+
 ```go
 import (
     "encoding/json"
@@ -275,6 +334,7 @@ println(string(buf) == string(exp)) // true
 ```
 
 #### APIs
+
 - validation: `Check()`, `Error()`, `Valid()`, `Exist()`
 - searching: `Index()`, `Get()`, `IndexPair()`, `IndexOrGet()`, `GetByPath()`
 - go-type casting: `Int64()`, `Float64()`, `String()`, `Number()`, `Bool()`, `Map[UseNumber|UseNode]()`, `Array[UseNumber|UseNode]()`, `Interface[UseNumber|UseNode]()`
@@ -282,21 +342,61 @@ println(string(buf) == string(exp)) // true
 - iteration: `Values()`, `Properties()`, `ForEach()`, `SortKeys()`
 - modification: `Set()`, `SetByIndex()`, `Add()`
 
+### Ast.Visitor
+
+Sonic provides an advanced API for fully parsing JSON into non-standard types (neither `struct` nor `map[string]interface{}`) without using any intermediate representation (`ast.Node` or `interface{}`). For example, you might have the following types which are like `interface{}` but actually not `interface{}`:
+
+```go
+type UserNode interface {}
+
+// the following types implement the UserNode interface.
+type (
+    UserNull    struct{}
+    UserBool    struct{ Value bool }
+    UserInt64   struct{ Value int64 }
+    UserFloat64 struct{ Value float64 }
+    UserString  struct{ Value string }
+    UserObject  struct{ Value map[string]UserNode }
+    UserArray   struct{ Value []UserNode }
+)
+```
+
+Sonic provides the following API to return **the preorder traversal of a JSON AST**. `ast.Visitor` is a SAX-style interface, which is used in some C++ JSON libraries. You should implement `ast.Visitor` by yourself and pass it to the `ast.Preorder()` method. In your visitor you can use your custom types to represent JSON values. There may be an O(n) space container (such as a stack) in your visitor to record the object / array hierarchy.
+
+```go
+func Preorder(str string, visitor Visitor, opts *VisitorOptions) error
+
+type Visitor interface {
+    OnNull() error
+    OnBool(v bool) error
+    OnString(v string) error
+    OnInt64(v int64, n json.Number) error
+    OnFloat64(v float64, n json.Number) error
+    OnObjectBegin(capacity int) error
+    OnObjectKey(key string) error
+    OnObjectEnd() error
+    OnArrayBegin(capacity int) error
+    OnArrayEnd() error
+}
+```
+
+See [ast/visitor.go](https://github.com/bytedance/sonic/blob/main/ast/visitor.go) for detailed usage. We also implement a demo visitor for `UserNode` in [ast/visitor_test.go](https://github.com/bytedance/sonic/blob/main/ast/visitor_test.go).
+
 ## Compatibility
-Sonic **DOES NOT** ensure to support all environments, due to the difficulty of developing high-performance codes. For developers who use sonic to build their applications in different environments, we have the following suggestions:
-- Developing on **Mac M1**: Make sure you have Rosetta 2 installed on your machine, and set `GOARCH=amd64` when building your application. Rosetta 2 can automatically translate x86 binaries to arm64 binaries and run x86 applications on Mac M1.
-- Developing on **Linux arm64**: You can install qemu and use the `qemu-x86_64 -cpu max` command to convert x86 binaries to amr64 binaries for applications built with sonic. The qemu can achieve a similar transfer effect to Rosetta 2 on Mac M1.
+For developers who want to use sonic to meet different scenarios, we provide some integrated configs as `sonic.API`:
 
-For developers who want to use sonic on Linux arm64 without qemu, or those who want to handle JSON strictly consistent with `encoding/json`, we provide some compatible APIs as `sonic.API`
-- `ConfigDefault`: the sonic's default config (`EscapeHTML=false`,`SortKeys=false`...) to run on sonic-supporting environment. It will fall back to `encoding/json` with the corresponding config, and some options like `SortKeys=false` will be invalid.
-- `ConfigStd`: the std-compatible config (`EscapeHTML=true`,`SortKeys=true`...) to run on sonic-supporting environment. It will fall back to `encoding/json`.
-- `ConfigFastest`: the fastest config (`NoQuoteTextMarshaler=true`) to run on sonic-supporting environment. It will fall back to `encoding/json` with the corresponding config, and some options will be invalid.
+- `ConfigDefault`: sonic's default config (`EscapeHTML=false`,`SortKeys=false`...) to run sonic fast while ensuring security.
+- `ConfigStd`: the std-compatible config (`EscapeHTML=true`,`SortKeys=true`...).
+- `ConfigFastest`: the fastest config (`NoQuoteTextMarshaler=true`) to run sonic as fast as possible.
+
+Sonic **DOES NOT** ensure support for all environments, due to the difficulty of developing high-performance code. On a non-sonic-supporting environment, the implementation will fall back to `encoding/json`, and the above configs will all be equal to `ConfigStd`.
 
 ## Tips
 
 ### Pretouch
+
 Since Sonic uses [golang-asm](https://github.com/twitchyliquid64/golang-asm) as a JIT assembler, which is NOT very suitable for runtime compiling, first-hit running of a huge schema may cause request-timeout or even process-OOM. For better stability, we advise **using `Pretouch()` for huge-schema or compact-memory applications** before `Marshal()/Unmarshal()`.
+
 ```go
 import (
     "reflect"
@@ -311,7 +411,7 @@ func init() {
     err := sonic.Pretouch(reflect.TypeOf(v))
 
     // with more CompileOption...
-    err := sonic.Pretouch(reflect.TypeOf(v), 
+    err := sonic.Pretouch(reflect.TypeOf(v),
         // If the type is too deep nesting (nesting depth > option.DefaultMaxInlineDepth),
         // you can set compile recursive loops in Pretouch for better stability in JIT.
         option.WithCompileRecursiveDepth(loop),
@@ -322,17 +422,23 @@ func init() {
 ```
 
 ### Copy string
-When decoding **string values without any escaped characters**, sonic references them from the origin JSON buffer instead of mallocing a new buffer to copy. This helps a lot for CPU performance but may leave the whole JSON buffer in memory as long as the decoded objects are being used. In practice, we found the extra memory introduced by referring JSON buffer is usually 20% ~ 80% of decoded objects. Once an application holds these objects for a long time (for example, cache the decoded objects for reusing), its in-use memory on the server may go up. We provide the option `decoder.CopyString()` for users to choose not to reference the JSON buffer, which may cause a decline in CPU performance to some degree.
+
+When decoding **string values without any escaped characters**, sonic references them from the origin JSON buffer instead of mallocing a new buffer to copy. This helps a lot for CPU performance but may leave the whole JSON buffer in memory as long as the decoded objects are being used. In practice, we found the extra memory introduced by referring JSON buffer is usually 20% ~ 80% of decoded objects.
Once an application holds these objects for a long time (for example, cache the decoded objects for reusing), its in-use memory on the server may go up.
+
+- `Config.CopyString`/`decoder.CopyString()`: We provide the option for `Decode()` / `Unmarshal()` users to choose not to reference the JSON buffer, which may cause a decline in CPU performance to some degree.
+
+- `GetFromStringNoCopy()`: For memory safety, `sonic.Get()` / `sonic.GetFromString()` now copies the returned JSON. If you want to get JSON more quickly and do not care about memory usage, you can use `GetFromStringNoCopy()` to return JSON referenced directly from the source.
 
 ### Pass string or []byte?
+
 For alignment to `encoding/json`, we provide API to pass `[]byte` as an argument, but the string-to-bytes copy is conducted at the same time considering safety, which may lose performance when the origin JSON is huge. Therefore, you can use `UnmarshalString()` and `GetFromString()` to pass a string, as long as your origin data is a string or **nocopy-cast** is safe for your []byte. We also provide API `MarshalString()` for convenient **nocopy-cast** of encoded JSON []byte, which is safe since sonic's output bytes is always duplicated and unique.
 
 ### Accelerate `encoding.TextMarshaler`
 
-To ensure data security, sonic.Encoder quotes and escapes string values from `encoding.TextMarshaler` interfaces by default, which may degrade performance much if most of your data is in form of them. We provide `encoder.NoQuoteTextMarshaler` to skip these operations, which means you **MUST** ensure their output string escaped and quoted following [RFC8259](https://datatracker.ietf.org/doc/html/rfc8259).
+To ensure data security, sonic.Encoder quotes and escapes string values from `encoding.TextMarshaler` interfaces by default, which may degrade performance significantly if most of your data is in that form. We provide `encoder.NoQuoteTextMarshaler` to skip these operations, which means you **MUST** ensure their output strings are escaped and quoted following [RFC8259](https://datatracker.ietf.org/doc/html/rfc8259).
 
 ### Better performance for generic data
+
 In **fully-parsed** scenario, `Unmarshal()` performs better than `Get()`+`Node.Interface()`. But if you only have a part of the schema for specific json, you can combine `Get()` and `Unmarshal()` together:
+
 ```go
 import "github.com/bytedance/sonic"
 
@@ -340,7 +446,9 @@ node, err := sonic.GetFromString(_TwitterJson, "statuses", 3, "user")
 var user User // your partial schema...
 err = sonic.UnmarshalString(node.Raw(), &user)
 ```
+
 Even if you don't have any schema, use `ast.Node` as the container of generic values instead of `map` or `interface`:
+
 ```go
 import "github.com/bytedance/sonic"
 
@@ -351,12 +459,37 @@ err = user.Check()
 
 // err = user.LoadAll() // only call this when you want to use 'user' concurrently...
 go someFunc(user)
 ```
+
 Why? Because `ast.Node` stores its children using `array`:
+
 - `Array`'s performance is **much better** than `Map` when Inserting (Deserialize) and Scanning (Serialize) data;
 - **Hashing** (`map[x]`) is not as efficient as **Indexing** (`array[x]`), which `ast.Node` can conduct on **both array and object**;
 - Using `Interface()`/`Map()` means Sonic must parse all the underlying values, while `ast.Node` can parse them **on demand**.
 
 **CAUTION:** `ast.Node` **DOESN'T** ensure concurrent security directly, due to its **lazy-load** design.
However, you can call `Node.Load()`/`Node.LoadAll()` to achieve that, which may bring performance reduction while it still works faster than converting to `map` or `interface{}`
+
+### Ast.Node or Ast.Visitor?
+
+For generic data, `ast.Node` should be enough for your needs in most cases.
+
+However, `ast.Node` is designed for partially processing a JSON string. It has some special designs, such as lazy-load, which might not be suitable for directly parsing the whole JSON string the way `Unmarshal()` does. Although `ast.Node` is better than `map` or `interface{}`, it is still a kind of intermediate representation: if your final types are customized, you have to convert the above types to your custom types after parsing.
+
+For better performance in that case, `ast.Visitor` is the better choice. It performs JSON decoding like `Unmarshal()`, and you can directly use your final types to represent a JSON AST without any intermediate representations.
+
+But `ast.Visitor` is not a very handy API. You might need to write a lot of code to implement your visitor and carefully maintain the tree hierarchy during decoding. Please read the comments in [ast/visitor.go](https://github.com/bytedance/sonic/blob/main/ast/visitor.go) carefully if you decide to use this API.
+
+### Buffer Size
+
+Sonic uses memory pools in many places, such as `encoder.Encode` and `ast.Node.MarshalJSON`, to improve performance, which may produce more in-use memory when the server's load is high. See [issue 614](https://github.com/bytedance/sonic/issues/614). Therefore, we introduce some options to let users control the behavior of the memory pool. See the [option](https://pkg.go.dev/github.com/bytedance/sonic@v1.11.9/option#pkg-variables) package.
+
+### Faster JSON skip
+
+For security, sonic uses the [FSM](native/skip_one.c) algorithm to validate JSON when decoding raw JSON or encoding `json.Marshaler`, which is much slower (1~10x) than the [SIMD-searching-pair](native/skip_one_fast.c) algorithm. If you have many redundant JSON values and do NOT need to strictly validate JSON correctness, you can enable the options below (a configuration sketch follows the Community section):
+
+- `Config.NoValidateJSONSkip`: for faster skipping of JSON when decoding, such as unknown fields, `json.Unmarshaler` (`json.RawMessage`), mismatched values, and redundant array elements
+- `Config.NoValidateJSONMarshaler`: avoid validating JSON when encoding `json.Marshaler`
+- `SearchOption.ValidateJSON`: indicates whether to validate the located JSON value in `Get`
+
 ## Community
+
 Sonic is a subproject of [CloudWeGo](https://www.cloudwego.io/). We are committed to building a cloud native ecosystem.
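+
+To tie the two `Config` switches above to the API changes later in this patch, here is a minimal sketch; the payload is made up, and the field names follow the `sonic.Config` additions in `api.go` below:
+
+```go
+package main
+
+import (
+	"fmt"
+
+	"github.com/bytedance/sonic"
+)
+
+func main() {
+	// Freeze a custom API that skips strict validation of skipped values
+	// and of json.Marshaler output, trading validation for speed.
+	api := sonic.Config{
+		NoValidateJSONSkip:      true,
+		NoValidateJSONMarshaler: true,
+	}.Froze()
+
+	var v struct {
+		Known string `json:"known"`
+	}
+	// The "unknown" field is skipped without the slower FSM validation.
+	if err := api.UnmarshalFromString(`{"known":"x","unknown":[1,2,3]}`, &v); err != nil {
+		panic(err)
+	}
+	fmt.Println(v.Known) // x
+}
+```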
diff --git a/vendor/github.com/bytedance/sonic/README_ZH_CN.md b/vendor/github.com/bytedance/sonic/README_ZH_CN.md index 43fd1d6772f35cd18192467928e2cb67d132d6ad..4f8980c53fbd153db8faef778324b3c9d430368f 100644 --- a/vendor/github.com/bytedance/sonic/README_ZH_CN.md +++ b/vendor/github.com/bytedance/sonic/README_ZH_CN.md @@ -6,9 +6,13 @@ ## 依赖 -- Go 1.15~1.20 -- Linux/MacOS/Windows -- Amd64 架构 +- Go: 1.17~1.23 +- OS: Linux / MacOS / Windows +- CPU: AMD64 / ARM64(需要 Go1.20 以上) + +## 接口 + +详见 [go.dev](https://pkg.go.dev/github.com/bytedance/sonic) ## 特色 @@ -19,7 +23,9 @@ ## 基准测试 对于**所有大小**的 json 和**所有使用场景**, **Sonic 表现均为最佳**。 + - [中型](https://github.com/bytedance/sonic/blob/main/decoder/testdata_test.go#L19) (13kB, 300+ 键, 6 层) + ```powershell goversion: 1.17.1 goos: darwin @@ -79,13 +85,18 @@ BenchmarkSetOne_Jsoniter-16 79475 ns/op 163.8 BenchmarkSetOne_Parallel_Sonic-16 850.9 ns/op 15305.31 MB/s 1584 B/op 17 allocs/op BenchmarkSetOne_Parallel_Sjson-16 18194 ns/op 715.77 MB/s 52247 B/op 9 allocs/op BenchmarkSetOne_Parallel_Jsoniter-16 33560 ns/op 388.05 MB/s 45892 B/op 964 allocs/op +BenchmarkLoadNode/LoadAll()-16 11384 ns/op 1143.93 MB/s 6307 B/op 25 allocs/op +BenchmarkLoadNode_Parallel/LoadAll()-16 5493 ns/op 2370.68 MB/s 7145 B/op 25 allocs/op +BenchmarkLoadNode/Interface()-16 17722 ns/op 734.85 MB/s 13323 B/op 88 allocs/op +BenchmarkLoadNode_Parallel/Interface()-16 10330 ns/op 1260.70 MB/s 15178 B/op 88 allocs/op ``` + - [小型](https://github.com/bytedance/sonic/blob/main/testdata/small.go) (400B, 11 个键, 3 层) ![small benchmarks](./docs/imgs/bench-small.png) - [大型](https://github.com/bytedance/sonic/blob/main/testdata/twitter.json) (635kB, 10000+ 个键, 6 层) ![large benchmarks](./docs/imgs/bench-large.png) -要查看基准测试代码,请参阅 [bench.sh](https://github.com/bytedance/sonic/blob/main/bench.sh) 。 +要查看基准测试代码,请参阅 [bench.sh](https://github.com/bytedance/sonic/blob/main/scripts/bench.sh) 。 ## 工作原理 @@ -96,6 +107,7 @@ BenchmarkSetOne_Parallel_Jsoniter-16 33560 ns/op 388.0 ### 序列化/反序列化 默认的行为基本上与 `encoding/json` 相一致,除了 HTML 转义形式(参见 [Escape HTML](https://github.com/bytedance/sonic/blob/main/README.md#escape-html)) 和 `SortKeys` 功能(参见 [Sort Keys](https://github.com/bytedance/sonic/blob/main/README.md#sort-keys))**没有**遵循 [RFC8259](https://datatracker.ietf.org/doc/html/rfc8259) 。 + ```go import "github.com/bytedance/sonic" @@ -109,7 +121,9 @@ err := sonic.Unmarshal(output, &data) ### 流式输入输出 Sonic 支持解码 `io.Reader` 中输入的 json,或将对象编码为 json 后输出至 `io.Writer`,以处理多个值并减少内存消耗。 + - 编码器 + ```go var o1 = map[string]interface{}{ "a": "b", @@ -124,7 +138,9 @@ fmt.Println(w.String()) // {"a":"b"} // 1 ``` + - 解码器 + ```go var o = map[string]interface{}{} var r = strings.NewReader(`{"a":"b"}{"1":"2"}`) @@ -168,6 +184,7 @@ fm := root.Interface().(float64) // jn == jm ### 对键排序 考虑到排序带来的性能损失(约 10% ), sonic 默认不会启用这个功能。如果你的组件依赖这个行为(如 [zstd](https://github.com/facebook/zstd)) ,可以仿照下面的例子: + ```go import "github.com/bytedance/sonic" import "github.com/bytedance/sonic/encoder" @@ -184,6 +201,7 @@ err := root.SortKeys() ### HTML 转义 考虑到性能损失(约15%), sonic 默认不会启用这个功能。你可以使用 `encoder.EscapeHTML` 选项来开启(与 `encoding/json.HTMLEscape` 行为一致)。 + ```go import "github.com/bytedance/sonic" @@ -192,11 +210,13 @@ ret, err := Encode(v, EscapeHTML) // ret == `{"\u0026\u0026":{"X":"\u003c\u003e" ``` ### 紧凑格式 + Sonic 默认将基本类型( `struct` , `map` 等)编码为紧凑格式的 JSON ,除非使用 `json.RawMessage` or `json.Marshaler` 进行编码: sonic 确保输出的 JSON 合法,但出于性能考虑,**不会**加工成紧凑格式。我们提供选项 `encoder.CompactMarshaler` 来添加此过程, ### 打印错误 如果输入的 JSON 存在无效的语法,sonic 将返回 
`decoder.SyntaxError`，该错误支持错误位置的美化输出。
+
 ```go
 import "github.com/bytedance/sonic"
 import "github.com/bytedance/sonic/decoder"
@@ -224,6 +244,7 @@ if err != nil {
 
 #### 类型不匹配 [Sonic v1.6.0]
 
 如果给定键中存在**类型不匹配**的值， sonic 会抛出 `decoder.MismatchTypeError` （如果有多个，只会报告最后一个），但仍会跳过错误的值并解码下一个 JSON 。
+
 ```go
 import "github.com/bytedance/sonic"
 import "github.com/bytedance/sonic/decoder"
@@ -236,13 +257,15 @@ err := UnmarshalString(`{"A":"1","B":1}`, &data)
 println(err.Error()) // Mismatch type int with value string "at index 5: mismatched type with value\n\n\t{\"A\":\"1\",\"B\":1}\n\t.....^.........\n"
 fmt.Printf("%+v", data) // {A:0 B:1}
 ```
+
 ### `Ast.Node`
 
-Sonic/ast.Node 是完全独立的 JSON 抽象语法树库。它实现了序列化和反序列化，并提供了获取和修改通用数据的鲁棒的 API。
+Sonic/ast.Node 是完全独立的 JSON 抽象语法树库。它实现了序列化和反序列化，并提供了获取和修改 JSON 数据的鲁棒的 API。
 
 #### 查找/索引
 
 通过给定的路径搜索 JSON 片段，路径必须为非负整数，字符串或 `nil` 。
+
 ```go
 import "github.com/bytedance/sonic"
 
@@ -256,11 +279,29 @@ raw := root.Raw() // == string(input)
 root, err := sonic.Get(input, "key1", 1, "key2")
 sub := root.Get("key3").Index(2).Int64() // == 3
 ```
+
 **注意**：由于 `Index()` 使用偏移量来定位数据，比使用扫描的 `Get()` 要快的多，建议尽可能的使用 `Index` 。 Sonic 也提供了另一个 API， `IndexOrGet()` ，以偏移量为基础并且也确保键的匹配。
 
+#### 查找选项
+
+`ast.Searcher` 提供了一些选项，以满足用户的不同需求：
+
+```go
+opts := ast.SearchOption{ CopyReturn: true ... }
+val, err := sonic.GetWithOptions(JSON, opts, "key")
+```
+
+- CopyReturn
+指示搜索器复制结果 JSON 字符串，而不是从输入中引用。如果用户会缓存结果，这有助于减少内存占用。
+- ConcurrentRead
+因为 `ast.Node` 使用惰性加载（Lazy-Load）设计，默认不支持并发读取。如果需要并发读取，请指定该选项。
+- ValidateJSON
+指示搜索器校验整个 JSON。该选项默认启用，但会对查找速度有一定影响。
+
 #### 修改
-使用 ` Set()` / `Unset()` 修改 json 的内容
+使用 `Set()` / `Unset()` 修改 json 的内容
+
 ```go
 import "github.com/bytedance/sonic"
 
@@ -277,7 +318,9 @@ println(root.Get("key4").Check()) // "value not exist"
 ```
 
 #### 序列化
+
 要将 `ast.Node` 编码为 json ，使用 `MarshalJson()` 或者 `json.Marshal()` （必须传递指向节点的指针）
+
 ```go
 import (
     "encoding/json"
@@ -291,6 +334,7 @@ println(string(buf) == string(exp)) // true
 ```
 
 #### APIs
+
 - 合法性检查: `Check()`, `Error()`, `Valid()`, `Exist()`
 - 索引: `Index()`, `Get()`, `IndexPair()`, `IndexOrGet()`, `GetByPath()`
 - 转换至 go 内置类型: `Int64()`, `Float64()`, `String()`, `Number()`, `Bool()`, `Map[UseNumber|UseNode]()`, `Array[UseNumber|UseNode]()`, `Interface[UseNumber|UseNode]()`
@@ -298,21 +342,61 @@ println(string(buf) == string(exp)) // true
 - 迭代: `Values()`, `Properties()`, `ForEach()`, `SortKeys()`
 - 修改: `Set()`, `SetByIndex()`, `Add()`
 
+### `Ast.Visitor`
+
+Sonic 提供了一个高级的 API 用于直接全量解析 JSON 到非标准容器里 (既不是 `struct` 也不是 `map[string]interface{}`) 且不需要借助任何中间表示 (`ast.Node` 或 `interface{}`)。举个例子，你可能定义了下述的类型，它们看起来像 `interface{}`，但实际上并不是：
+
+```go
+type UserNode interface {}
+
+// the following types implement the UserNode interface.
+type (
+    UserNull    struct{}
+    UserBool    struct{ Value bool }
+    UserInt64   struct{ Value int64 }
+    UserFloat64 struct{ Value float64 }
+    UserString  struct{ Value string }
+    UserObject  struct{ Value map[string]UserNode }
+    UserArray   struct{ Value []UserNode }
+)
+```
+
+Sonic 提供了下述的 API 来返回 **“对 JSON AST 的前序遍历”**。`ast.Visitor` 是一个 SAX 风格的接口，这在某些 C++ 的 JSON 解析库中被使用到。你需要自己实现一个 `ast.Visitor`，将它传递给 `ast.Preorder()` 方法。在你的实现中你可以使用自定义的类型来表示 JSON 的值。在你的 `ast.Visitor` 中，可能需要有一个 O(n) 空间复杂度的容器（比如说栈）来记录 object / array 的层级。
+
+```go
+func Preorder(str string, visitor Visitor, opts *VisitorOptions) error
+
+type Visitor interface {
+    OnNull() error
+    OnBool(v bool) error
+    OnString(v string) error
+    OnInt64(v int64, n json.Number) error
+    OnFloat64(v float64, n json.Number) error
+    OnObjectBegin(capacity int) error
+    OnObjectKey(key string) error
+    OnObjectEnd() error
+    OnArrayBegin(capacity int) error
+    OnArrayEnd() error
+}
+```
+
+详细用法参看 [ast/visitor.go](https://github.com/bytedance/sonic/blob/main/ast/visitor.go)，我们还为 `UserNode` 实现了一个示例 `ast.Visitor`，你可以在 [ast/visitor_test.go](https://github.com/bytedance/sonic/blob/main/ast/visitor_test.go) 中找到它。
+
 ## 兼容性
-由于开发高性能代码的困难性， Sonic **不**保证对所有环境的支持。对于在不同环境中使用 Sonic 构建应用程序的开发者，我们有以下建议：
-- 在 **Mac M1** 上开发：确保在您的计算机上安装了 Rosetta 2，并在构建时设置 `GOARCH=amd64` 。 Rosetta 2 可以自动将 x86 二进制文件转换为 arm64 二进制文件，并在 Mac M1 上运行 x86 应用程序。
-- 在 **Linux arm64** 上开发：您可以安装 qemu 并使用 `qemu-x86_64 -cpu max` 命令来将 x86 二进制文件转换为 arm64 二进制文件。qemu可以实现与Mac M1上的Rosetta 2类似的转换效果。
+对于想要在不同场景下使用 sonic 的开发者，我们提供了一些集成配置：
 
-对于希望在不使用 qemu 下使用 sonic 的开发者，或者希望处理 JSON 时与 `encoding/JSON` 严格保持一致的开发者，我们在 `sonic.API` 中提供了一些兼容性 API
-- `ConfigDefault`: 在支持 sonic 的环境下 sonic 的默认配置（`EscapeHTML=false`，`SortKeys=false`等）。行为与具有相应配置的 `encoding/json` 一致，一些选项，如 `SortKeys=false` 将无效。
-- `ConfigStd`: 在支持 sonic 的环境下与标准库兼容的配置（`EscapeHTML=true`，`SortKeys=true`等）。行为与 `encoding/json` 一致。
-- `ConfigFastest`: 在支持 sonic 的环境下运行最快的配置（`NoQuoteTextMarshaler=true`）。行为与具有相应配置的 `encoding/json` 一致，某些选项将无效。
+- `ConfigDefault`: sonic 的默认配置（`EscapeHTML=false`, `SortKeys=false`…），保证性能的同时兼顾安全性。
+- `ConfigStd`: 与 `encoding/json` 保证完全兼容的配置。
+- `ConfigFastest`: 最快的配置（`NoQuoteTextMarshaler=true...`），性能最优，但会缺少一些安全性检查（如 UTF-8 校验等）。
+
+由于开发高性能代码的困难，Sonic **不**保证支持所有环境。在不支持 sonic 的环境中，实现会回退到 `encoding/json`，因此上述配置将全部等同于 `ConfigStd`。
 
 ## 注意事项
 
 ### 预热
+
 由于 Sonic 使用 [golang-asm](https://github.com/twitchyliquid64/golang-asm) 作为 JIT 汇编器，这个库并不适用于运行时编译，第一次运行一个大型模式可能会导致请求超时甚至进程内存溢出。为了更好地稳定性，我们建议在运行大型模式或在内存有限的应用中，在使用 `Marshal()/Unmarshal()` 前运行 `Pretouch()`。
+
 ```go
 import (
     "reflect"
@@ -327,7 +411,7 @@ func init() {
     err := sonic.Pretouch(reflect.TypeOf(v))
 
     // with more CompileOption...
-    err := sonic.Pretouch(reflect.TypeOf(v), 
+    err := sonic.Pretouch(reflect.TypeOf(v),
        // If the type is too deep nesting (nesting depth > option.DefaultMaxInlineDepth),
        // you can set compile recursive loops in Pretouch for better stability in JIT.
        option.WithCompileRecursiveDepth(loop),
@@ -342,16 +426,17 @@ func init() {
 ```
 
 当解码 **没有转义字符的字符串**时， sonic 会从原始的 JSON 缓冲区内引用而不是复制到新的一个缓冲区中。这对 CPU 的性能方面很有帮助，但是可能因此在解码后对象仍在使用的时候将整个 JSON 缓冲区保留在内存中。实践中我们发现，通过引用 JSON 缓冲区引入的额外内存通常是解码后对象的 20% 至 80% ，一旦应用长期保留这些对象（如缓存以备重用），服务器所使用的内存可能会增加。我们提供了选项 `decoder.CopyString()` 供用户选择，不引用 JSON 缓冲区。这可能在一定程度上降低 CPU 性能。
 
 ### 传递字符串还是字节数组？
+
 为了和 `encoding/json` 保持一致，我们提供了传递 `[]byte` 作为参数的 API ，但考虑到安全性，字符串到字节的复制是同时进行的，这在原始 JSON 非常大时可能会导致性能损失。因此，你可以使用 `UnmarshalString()` 和 `GetFromString()` 来传递字符串，只要你的原始数据是字符串，或**零拷贝类型转换**对于你的字节数组是安全的。我们也提供了 `MarshalString()` 的 API ，以便对编码的 JSON 字节数组进行**零拷贝类型转换**，因为 sonic 输出的字节始终是重复并且唯一的，所以这样是安全的。
 
 ### 加速 `encoding.TextMarshaler`
 
 为了保证数据安全性， `sonic.Encoder` 默认会对来自 `encoding.TextMarshaler` 接口的字符串进行引用和转义，如果大部分数据都是这种形式那可能会导致很大的性能损失。我们提供了 `encoder.NoQuoteTextMarshaler` 选项来跳过这些操作，但你**必须**保证他们的输出字符串依照 [RFC8259](https://datatracker.ietf.org/doc/html/rfc8259) 进行了转义和引用。
-
 ### 泛型的性能优化
 
 在 **完全解析**的场景下， `Unmarshal()` 表现得比 `Get()`+`Node.Interface()` 更好。但是如果你只有特定 JSON 的部分模式，你可以将 `Get()` 和 `Unmarshal()` 结合使用：
+
 ```go
 import "github.com/bytedance/sonic"
 
@@ -359,7 +444,9 @@ node, err := sonic.GetFromString(_TwitterJson, "statuses", 3, "user")
 var user User // your partial schema...
 err = sonic.UnmarshalString(node.Raw(), &user)
 ```
+
 甚至如果你没有任何模式，可以用 `ast.Node` 代替 `map` 或 `interface` 作为泛型的容器：
+
 ```go
 import "github.com/bytedance/sonic"
 
@@ -370,13 +457,37 @@ err = user.Check()
 
 // err = user.LoadAll() // only call this when you want to use 'user' concurrently...
 go someFunc(user)
 ```
+
 为什么？因为 `ast.Node` 使用 `array` 来存储其子节点：
+
 - 在插入（反序列化）和扫描（序列化）数据时，`Array` 的性能比 `Map` **好得多**；
 - **哈希**（`map[x]`）的效率不如**索引**（`array[x]`）高效，而 `ast.Node` 可以在数组和对象上使用索引；
 - 使用 `Interface()` / `Map()` 意味着 sonic 必须解析所有的底层值，而 `ast.Node` 可以**按需解析**它们。
 
 **注意**：由于 `ast.Node` 的惰性加载设计，其**不能**直接保证并发安全性，但你可以调用 `Node.Load()` / `Node.LoadAll()` 来实现并发安全。尽管可能会带来性能损失，但仍比转换成 `map` 或 `interface{}` 更为高效。
 
+### 使用 `ast.Node` 还是 `ast.Visitor`？
+
+对于泛型数据的解析，`ast.Node` 在大多数场景上应该能够满足你的需求。
+
+然而，`ast.Node` 是一种针对部分解析 JSON 而设计的泛型容器，它包含一些特殊设计，比如惰性加载，如果你希望像 `Unmarshal()` 那样直接解析整个 JSON，这些设计可能并不合适。尽管 `ast.Node` 相较于 `map` 或 `interface{}` 来说是更好的一种泛型容器，但它毕竟也是一种中间表示，如果你的最终类型是自定义的，你还得在解析完成后将上述类型转化成你自定义的类型。
+
+在上述场景中，如果想要有更极致的性能，`ast.Visitor` 会是更好的选择。它采用和 `Unmarshal()` 类似的形式解析 JSON，并且你可以直接使用你的最终类型去表示 JSON AST，而不需要经过额外的任何中间表示。
+
+但是，`ast.Visitor` 并不是一个很易用的 API。你可能需要写大量的代码去实现自己的 `ast.Visitor`，并且需要在解析过程中仔细维护树的层级。如果你决定要使用这个 API，请先仔细阅读 [ast/visitor.go](https://github.com/bytedance/sonic/blob/main/ast/visitor.go) 中的注释。
+
+### 缓冲区大小
+
+Sonic 在许多地方使用内存池（如 `encoder.Encode`、`ast.Node.MarshalJSON` 等）来提高性能，这可能会在服务器负载高时产生更多的在用（in-use）内存。参见 [issue 614](https://github.com/bytedance/sonic/issues/614)。因此，我们引入了一些选项来让用户配置内存池的行为。参见 [option](https://pkg.go.dev/github.com/bytedance/sonic@v1.11.9/option#pkg-variables) 包。
+
+### 更快的 JSON Skip
+
+为了安全起见，在跳过原始 JSON 时，sonic 解码器默认使用 [FSM](native/skip_one.c) 算法，在跳过的同时校验 JSON。它比 [SIMD-searching-pair](native/skip_one_fast.c) 算法慢得多（1~10 倍）。如果存在很多冗余的 JSON 值，并且不需要严格验证 JSON 的正确性，可以启用以下选项：
+
+- `Config.NoValidateJSONSkip`: 用于在解码时更快地跳过 JSON，例如未知字段、`json.RawMessage`、不匹配的值和冗余的数组元素等
+- `Config.NoValidateJSONMarshaler`: 编码 `json.Marshaler` 的输出时跳过 JSON 校验
+- `SearchOption.ValidateJSON`: 指示 `Get` 时是否校验定位到的 JSON 值
+
 ## 社区
 
 Sonic 是 [CloudWeGo](https://www.cloudwego.io/) 下的一个子项目。我们致力于构建云原生生态系统。
diff --git a/vendor/github.com/bytedance/sonic/api.go b/vendor/github.com/bytedance/sonic/api.go
index a042476f1648d5b34bf4b796c745aa408a356cc6..af6be70a42527c8b6dd013addbdc59791fb35574 100644
--- a/vendor/github.com/bytedance/sonic/api.go
+++ b/vendor/github.com/bytedance/sonic/api.go
@@ -20,8 +20,19 @@ import (
 	`io`
 
 	`github.com/bytedance/sonic/ast`
+	`github.com/bytedance/sonic/internal/rt`
 )
 
+const (
+	// UseStdJSON indicates you are using fallback implementation (encoding/json)
+	UseStdJSON = iota
+	// UseSonicJSON indicates you are using real sonic implementation
+	UseSonicJSON
+)
+
+// APIKind is the
kind of API, 0 is std json, 1 is sonic.
+const APIKind = apiKind
+
 // Config is a combination of sonic/encoder.Options and sonic/decoder.Options
 type Config struct {
 	// EscapeHTML indicates encoder to escape all HTML characters
@@ -68,11 +79,25 @@ type Config struct {
 
 	// ValidateString indicates decoder and encoder to valid string values: decoder will return errors
 	// when unescaped control chars(\u0000-\u001f) in the string value of JSON.
-	ValidateString bool
+	ValidateString bool
+
+	// NoValidateJSONMarshaler indicates that the encoder should not validate the output string
+	// after encoding the JSONMarshaler to JSON.
+	NoValidateJSONMarshaler bool
+
+	// NoValidateJSONSkip indicates the decoder should not validate the JSON value when skipping it,
+	// such as unknown-fields, mismatched-type, redundant elements.
+	NoValidateJSONSkip bool
+
+	// NoEncoderNewline indicates that the encoder should not add a newline after every message
+	NoEncoderNewline bool
+
+	// Encode Infinity or Nan float into `null`, instead of returning an error.
+	EncodeNullForInfOrNan bool
 }
 
 var (
-	// ConfigDefault is the default config of APIs, aiming at efficiency and safty.
+	// ConfigDefault is the default config of APIs, aiming at efficiency and safety.
 	ConfigDefault = Config{}.Froze()
 
 	// ConfigStd is the standard config of APIs, aiming at being compatible with encoding/json.
@@ -87,6 +112,8 @@ var (
 	// ConfigFastest is the fastest config of APIs, aiming at speed.
 	ConfigFastest = Config{
 		NoQuoteTextMarshaler: true,
+		NoValidateJSONMarshaler: true,
+		NoValidateJSONSkip: true,
 	}.Froze()
 )
 
@@ -109,7 +136,7 @@ type API interface {
 	NewEncoder(writer io.Writer) Encoder
 	// NewDecoder create a Decoder holding reader
 	NewDecoder(reader io.Reader) Decoder
-	// Valid validates the JSON-encoded bytes and reportes if it is valid
+	// Valid validates the JSON-encoded bytes and reports if it is valid
 	Valid(data []byte) bool
 }
 
@@ -148,6 +175,13 @@ func Marshal(val interface{}) ([]byte, error) {
 	return ConfigDefault.Marshal(val)
 }
 
+// MarshalIndent is like Marshal but applies Indent to format the output.
+// Each JSON element in the output will begin on a new line beginning with prefix
+// followed by one or more copies of indent according to the indentation nesting.
+func MarshalIndent(v interface{}, prefix, indent string) ([]byte, error) {
+	return ConfigDefault.MarshalIndent(v, prefix, indent)
+}
+
 // MarshalString returns the JSON encoding string of v.
 func MarshalString(val interface{}) (string, error) {
 	return ConfigDefault.MarshalToString(val)
@@ -165,22 +199,49 @@ func UnmarshalString(buf string, val interface{}) error {
 	return ConfigDefault.UnmarshalFromString(buf, val)
 }
 
-// Get searches the given path from json,
-// and returns its representing ast.Node.
+// Get searches and locates the given path from src json,
+// and returns an ast.Node representing that partial JSON.
 //
 // Each path arg must be integer or string:
 //     - Integer is target index(>=0), means searching current node as array.
 //     - String is target key, means searching current node as object.
 //
 //
-// Note, the api expects the json is well-formed at least,
-// otherwise it may return unexpected result.
+// Notice: It expects the src json is **Well-formed** and **Immutable** when calling,
+// otherwise it may return unexpected results.
+// Considering memory safety, the returned JSON is **Copied** from the input.
 func Get(src []byte, path ...interface{}) (ast.Node, error) {
-	return GetFromString(string(src), path...)
+	return GetCopyFromString(rt.Mem2Str(src), path...)
+}
+
+// GetWithOptions searches and locates the given path from src json,
+// with the specific options of ast.Searcher.
+func GetWithOptions(src []byte, opts ast.SearchOptions, path ...interface{}) (ast.Node, error) {
+	s := ast.NewSearcher(rt.Mem2Str(src))
+	s.SearchOptions = opts
+	return s.GetByPath(path...)
 }
 
-// GetFromString is same with Get except src is string,
-// which can reduce unnecessary memory copy.
+// GetFromString is the same as Get except src is a string.
+//
+// WARNING: The returned JSON is **Referenced** from the input.
+// Caching or long-time holding the returned node may cause OOM.
+// If your src is big, consider using GetCopyFromString().
 func GetFromString(src string, path ...interface{}) (ast.Node, error) {
 	return ast.NewSearcher(src).GetByPath(path...)
-}
\ No newline at end of file
+}
+
+// GetCopyFromString is the same as Get except src is a string.
+func GetCopyFromString(src string, path ...interface{}) (ast.Node, error) {
+	return ast.NewSearcher(src).GetByPathCopy(path...)
+}
+
+// Valid reports whether data is a valid JSON encoding.
+func Valid(data []byte) bool {
+	return ConfigDefault.Valid(data)
+}
+
+// ValidString reports whether data is a valid JSON encoding.
+func ValidString(data string) bool {
+	return ConfigDefault.Valid(rt.Str2Mem(data))
+}
diff --git a/vendor/github.com/bytedance/sonic/ast/api_amd64.go b/vendor/github.com/bytedance/sonic/ast/api.go
similarity index 69%
rename from vendor/github.com/bytedance/sonic/ast/api_amd64.go
rename to vendor/github.com/bytedance/sonic/ast/api.go
index 3047f59c303410ae95fbf37fc3f01035acf3ecd7..7c8253aa118968a5b9a30b5ab2c13fdc0615ae50 100644
--- a/vendor/github.com/bytedance/sonic/ast/api_amd64.go
+++ b/vendor/github.com/bytedance/sonic/ast/api.go
@@ -1,4 +1,5 @@
-// +build amd64,go1.15,!go1.21
+//go:build (amd64 && go1.17 && !go1.24) || (arm64 && go1.20 && !go1.24)
+// +build amd64,go1.17,!go1.24 arm64,go1.20,!go1.24
 
 /*
  * Copyright 2022 ByteDance Inc.
@@ -27,7 +28,7 @@ import ( `github.com/bytedance/sonic/internal/native/types` `github.com/bytedance/sonic/internal/rt` uq `github.com/bytedance/sonic/unquote` - `github.com/chenzhuoyu/base64x` + `github.com/bytedance/sonic/utf8` ) var typeByte = rt.UnpackEface(byte(0)).Type @@ -60,7 +61,7 @@ func quote(buf *[]byte, val string) { } // double buf size - *b = growslice(typeByte, *b, b.Cap*2) + *b = rt.GrowSlice(typeByte, *b, b.Cap*2) // ret is the complement of consumed input ret = ^ret // update input buffer @@ -77,17 +78,15 @@ func unquote(src string) (string, types.ParsingError) { return uq.String(src) } -func decodeBase64(src string) ([]byte, error) { - return base64x.StdEncoding.DecodeString(src) -} - -func encodeBase64(src []byte) string { - return base64x.StdEncoding.EncodeToString(src) -} - func (self *Parser) decodeValue() (val types.JsonState) { sv := (*rt.GoString)(unsafe.Pointer(&self.s)) - self.p = native.Value(sv.Ptr, sv.Len, self.p, &val, 0) + flag := types.F_USE_NUMBER + if self.dbuf != nil { + flag = 0 + val.Dbuf = self.dbuf + val.Dcap = types.MaxDigitNums + } + self.p = native.Value(sv.Ptr, sv.Len, self.p, &val, uint64(flag)) return } @@ -104,7 +103,7 @@ func (self *Parser) skip() (int, types.ParsingError) { func (self *Node) encodeInterface(buf *[]byte) error { //WARN: NOT compatible with json.Encoder - return encoder.EncodeInto(buf, self.packAny(), 0) + return encoder.EncodeInto(buf, self.packAny(), encoder.NoEncoderNewline) } func (self *Parser) skipFast() (int, types.ParsingError) { @@ -115,10 +114,15 @@ func (self *Parser) skipFast() (int, types.ParsingError) { return start, 0 } -func (self *Parser) getByPath(path ...interface{}) (int, types.ParsingError) { - fsm := types.NewStateMachine() +func (self *Parser) getByPath(validate bool, path ...interface{}) (int, types.ParsingError) { + var fsm *types.StateMachine + if validate { + fsm = types.NewStateMachine() + } start := native.GetByPath(&self.s, &self.p, &path, fsm) - types.FreeStateMachine(fsm) + if validate { + types.FreeStateMachine(fsm) + } runtime.KeepAlive(path) if start < 0 { return self.p, types.ParsingError(-start) @@ -126,26 +130,6 @@ func (self *Parser) getByPath(path ...interface{}) (int, types.ParsingError) { return start, 0 } -func (self *Searcher) GetByPath(path ...interface{}) (Node, error) { - var err types.ParsingError - var start int - - self.parser.p = 0 - start, err = self.parser.getByPath(path...) - if err != 0 { - // for compatibility with old version - if err == types.ERR_NOT_FOUND { - return Node{}, ErrNotExist - } - if err == types.ERR_UNSUPPORT_TYPE { - panic("path must be either int(>=0) or string") - } - return Node{}, self.parser.syntaxError(err) - } - - t := switchRawType(self.parser.s[start]) - if t == _V_NONE { - return Node{}, self.parser.ExportError(err) - } - return newRawNode(self.parser.s[start:self.parser.p], t), nil -} \ No newline at end of file +func validate_utf8(str string) bool { + return utf8.ValidateString(str) +} diff --git a/vendor/github.com/bytedance/sonic/ast/api_compat.go b/vendor/github.com/bytedance/sonic/ast/api_compat.go index b18b5ae8c6a7ca30771a83810e95e9eb1e980d9e..a349afc0b96210b03408083f1f77ade84b909ae7 100644 --- a/vendor/github.com/bytedance/sonic/ast/api_compat.go +++ b/vendor/github.com/bytedance/sonic/ast/api_compat.go @@ -1,36 +1,40 @@ -// +build !amd64 go1.21 +// +build !amd64,!arm64 go1.24 !go1.17 arm64,!go1.20 /* - * Copyright 2022 ByteDance Inc. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ +* Copyright 2022 ByteDance Inc. +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ package ast import ( - `encoding/base64` `encoding/json` - `fmt` + `unicode/utf8` `github.com/bytedance/sonic/internal/native/types` `github.com/bytedance/sonic/internal/rt` ) +func init() { + println("WARNING:(ast) sonic only supports go1.17~1.23, but your environment is not suitable") +} + func quote(buf *[]byte, val string) { quoteString(buf, val) } +// unquote unescapes a internal JSON string (it doesn't count quotas at the begining and end) func unquote(src string) (string, types.ParsingError) { sp := rt.IndexChar(src, -1) out, ok := unquoteBytes(rt.BytesFrom(sp, len(src)+2, len(src)+2)) @@ -40,16 +44,9 @@ func unquote(src string) (string, types.ParsingError) { return rt.Mem2Str(out), 0 } -func decodeBase64(src string) ([]byte, error) { - return base64.StdEncoding.DecodeString(src) -} - -func encodeBase64(src []byte) string { - return base64.StdEncoding.EncodeToString(src) -} func (self *Parser) decodeValue() (val types.JsonState) { - e, v := decodeValue(self.s, self.p) + e, v := decodeValue(self.s, self.p, self.dbuf == nil) if e < 0 { return v } @@ -84,37 +81,34 @@ func (self *Node) encodeInterface(buf *[]byte) error { return nil } -func (self *Searcher) GetByPath(path ...interface{}) (Node, error) { - self.parser.p = 0 - - var err types.ParsingError +func (self *Parser) getByPath(validate bool, path ...interface{}) (int, types.ParsingError) { for _, p := range path { if idx, ok := p.(int); ok && idx >= 0 { - if err = self.parser.searchIndex(idx); err != 0 { - return Node{}, self.parser.ExportError(err) + if err := self.searchIndex(idx); err != 0 { + return self.p, err } } else if key, ok := p.(string); ok { - if err = self.parser.searchKey(key); err != 0 { - return Node{}, self.parser.ExportError(err) + if err := self.searchKey(key); err != 0 { + return self.p, err } } else { panic("path must be either int(>=0) or string") } } - var start = self.parser.p - if start, err = self.parser.skip(); err != 0 { - return Node{}, self.parser.ExportError(err) - } - ns := len(self.parser.s) - if self.parser.p > ns || start >= ns || start>=self.parser.p { - return Node{}, fmt.Errorf("skip %d char out of json boundary", start) + var start int + var e types.ParsingError + if validate { + start, e = self.skip() + } else { + start, e = self.skipFast() } - - t := switchRawType(self.parser.s[start]) - if t == _V_NONE { - return Node{}, 
self.parser.ExportError(err) + if e != 0 { + return self.p, e } + return start, 0 +} - return newRawNode(self.parser.s[start:self.parser.p], t), nil -} \ No newline at end of file +func validate_utf8(str string) bool { + return utf8.ValidString(str) +} diff --git a/vendor/github.com/bytedance/sonic/ast/buffer.go b/vendor/github.com/bytedance/sonic/ast/buffer.go new file mode 100644 index 0000000000000000000000000000000000000000..04701ef5b3a29743d806d0ad73b3576123ba2d86 --- /dev/null +++ b/vendor/github.com/bytedance/sonic/ast/buffer.go @@ -0,0 +1,470 @@ +/** + * Copyright 2023 ByteDance Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package ast + +import ( + "sort" + "unsafe" + + "github.com/bytedance/sonic/internal/caching" +) + +type nodeChunk [_DEFAULT_NODE_CAP]Node + +type linkedNodes struct { + head nodeChunk + tail []*nodeChunk + size int +} + +func (self *linkedNodes) Cap() int { + if self == nil { + return 0 + } + return (len(self.tail)+1)*_DEFAULT_NODE_CAP +} + +func (self *linkedNodes) Len() int { + if self == nil { + return 0 + } + return self.size +} + +func (self *linkedNodes) At(i int) (*Node) { + if self == nil { + return nil + } + if i >= 0 && i= _DEFAULT_NODE_CAP && i= self.size || target < 0 || target >= self.size { + return + } + // reserve source + n := *self.At(source) + if source < target { + // move every element (source,target] one step back + for i:=source; itarget; i-- { + *self.At(i) = *self.At(i-1) + } + } + // set target + *self.At(target) = n +} + +func (self *linkedNodes) Pop() { + if self == nil || self.size == 0 { + return + } + self.Set(self.size-1, Node{}) + self.size-- +} + +func (self *linkedNodes) Push(v Node) { + self.Set(self.size, v) +} + + +func (self *linkedNodes) Set(i int, v Node) { + if i < _DEFAULT_NODE_CAP { + self.head[i] = v + if self.size <= i { + self.size = i+1 + } + return + } + a, b := i/_DEFAULT_NODE_CAP-1, i%_DEFAULT_NODE_CAP + if a < 0 { + self.head[b] = v + } else { + self.growTailLength(a+1) + var n = &self.tail[a] + if *n == nil { + *n = new(nodeChunk) + } + (*n)[b] = v + } + if self.size <= i { + self.size = i+1 + } +} + +func (self *linkedNodes) growTailLength(l int) { + if l <= len(self.tail) { + return + } + c := cap(self.tail) + for c < l { + c += 1 + c>>_APPEND_GROW_SHIFT + } + if c == cap(self.tail) { + self.tail = self.tail[:l] + return + } + tmp := make([]*nodeChunk, l, c) + copy(tmp, self.tail) + self.tail = tmp +} + +func (self *linkedNodes) ToSlice(con []Node) { + if len(con) < self.size { + return + } + i := (self.size-1) + a, b := i/_DEFAULT_NODE_CAP-1, i%_DEFAULT_NODE_CAP + if a < 0 { + copy(con, self.head[:b+1]) + return + } else { + copy(con, self.head[:]) + con = con[_DEFAULT_NODE_CAP:] + } + + for i:=0; i>_APPEND_GROW_SHIFT + self.tail = make([]*nodeChunk, a+1, c) + } + self.tail = self.tail[:a+1] + + for i:=0; i= 0 && i < _DEFAULT_NODE_CAP && i= _DEFAULT_NODE_CAP && i>_APPEND_GROW_SHIFT + } + if c == cap(self.tail) { + self.tail = self.tail[:l] + return + } + tmp := make([]*pairChunk, l, 
c) + copy(tmp, self.tail) + self.tail = tmp +} + +// linear search +func (self *linkedPairs) Get(key string) (*Pair, int) { + if self.index != nil { + // fast-path + i, ok := self.index[caching.StrHash(key)] + if ok { + n := self.At(i) + if n.Key == key { + return n, i + } + // hash conflicts + goto linear_search + } else { + return nil, -1 + } + } +linear_search: + for i:=0; i>_APPEND_GROW_SHIFT + self.tail = make([]*pairChunk, a+1, c) + } + self.tail = self.tail[:a+1] + + for i:=0; i len(b) { + l = len(b) + } + for i := d; i < l; i++ { + if a[i] == b[i] { + continue + } + return a[i] < b[i] + } + return len(a) < len(b) +} + +type parseObjectStack struct { + parser Parser + v linkedPairs +} + +type parseArrayStack struct { + parser Parser + v linkedNodes +} + +func newLazyArray(p *Parser) Node { + s := new(parseArrayStack) + s.parser = *p + return Node{ + t: _V_ARRAY_LAZY, + p: unsafe.Pointer(s), + } +} + +func newLazyObject(p *Parser) Node { + s := new(parseObjectStack) + s.parser = *p + return Node{ + t: _V_OBJECT_LAZY, + p: unsafe.Pointer(s), + } +} + +func (self *Node) getParserAndArrayStack() (*Parser, *parseArrayStack) { + stack := (*parseArrayStack)(self.p) + return &stack.parser, stack +} + +func (self *Node) getParserAndObjectStack() (*Parser, *parseObjectStack) { + stack := (*parseObjectStack)(self.p) + return &stack.parser, stack +} + diff --git a/vendor/github.com/bytedance/sonic/ast/decode.go b/vendor/github.com/bytedance/sonic/ast/decode.go index 6a5f6fea332ee3e3534b947de7e3455df4cc594a..27aaf140882de288870181b4215d9169a9d04e60 100644 --- a/vendor/github.com/bytedance/sonic/ast/decode.go +++ b/vendor/github.com/bytedance/sonic/ast/decode.go @@ -17,19 +17,23 @@ package ast import ( - `encoding/base64` - `runtime` - `strconv` - `unsafe` - - `github.com/bytedance/sonic/internal/native/types` - `github.com/bytedance/sonic/internal/rt` + "encoding/base64" + "runtime" + "strconv" + "unsafe" + + "github.com/bytedance/sonic/internal/native/types" + "github.com/bytedance/sonic/internal/rt" + "github.com/bytedance/sonic/internal/utils" ) -const _blankCharsMask = (1 << ' ') | (1 << '\t') | (1 << '\r') | (1 << '\n') +// Hack: this is used for both checking space and cause firendly compile errors in 32-bit arch. 
+const _Sonic_Not_Support_32Bit_Arch__Checking_32Bit_Arch_Here = (1 << ' ') | (1 << '\t') | (1 << '\r') | (1 << '\n') + +var bytesNull = []byte("null") const ( - bytesNull = "null" + strNull = "null" bytesTrue = "true" bytesFalse = "false" bytesObject = "{}" @@ -37,7 +41,7 @@ const ( ) func isSpace(c byte) bool { - return (int(1< len(src) { return -int(types.ERR_EOF) } - if src[pos:ret] == bytesNull { + if src[pos:ret] == strNull { return ret } else { return -int(types.ERR_INVALID_CHAR) @@ -220,7 +224,7 @@ func decodeFloat64(src string, pos int) (ret int, v float64, err error) { return ret, v, nil } -func decodeValue(src string, pos int) (ret int, v types.JsonState) { +func decodeValue(src string, pos int, skipnum bool) (ret int, v types.JsonState) { pos = skipBlank(src, pos) if pos < 0 { return pos, types.JsonState{Vt: types.ValueType(pos)} @@ -256,20 +260,30 @@ func decodeValue(src string, pos int) (ret int, v types.JsonState) { } return ret, types.JsonState{Vt: types.V_FALSE} case '-', '+', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': - var iv int64 - ret, iv, _ = decodeInt64(src, pos) - if ret >= 0 { - return ret, types.JsonState{Vt: types.V_INTEGER, Iv: iv, Ep: pos} - } else if ret != -int(types.ERR_INVALID_NUMBER_FMT) { - return ret, types.JsonState{Vt: types.ValueType(ret)} - } - var fv float64 - ret, fv, _ = decodeFloat64(src, pos) - if ret >= 0 { - return ret, types.JsonState{Vt: types.V_DOUBLE, Dv: fv, Ep: pos} + if skipnum { + ret = skipNumber(src, pos) + if ret >= 0 { + return ret, types.JsonState{Vt: types.V_DOUBLE, Iv: 0, Ep: pos} + } else { + return ret, types.JsonState{Vt: types.ValueType(ret)} + } } else { - return ret, types.JsonState{Vt: types.ValueType(ret)} + var iv int64 + ret, iv, _ = decodeInt64(src, pos) + if ret >= 0 { + return ret, types.JsonState{Vt: types.V_INTEGER, Iv: iv, Ep: pos} + } else if ret != -int(types.ERR_INVALID_NUMBER_FMT) { + return ret, types.JsonState{Vt: types.ValueType(ret)} + } + var fv float64 + ret, fv, _ = decodeFloat64(src, pos) + if ret >= 0 { + return ret, types.JsonState{Vt: types.V_DOUBLE, Dv: fv, Ep: pos} + } else { + return ret, types.JsonState{Vt: types.ValueType(ret)} + } } + default: return -int(types.ERR_INVALID_CHAR), types.JsonState{Vt:-types.ValueType(types.ERR_INVALID_CHAR)} } @@ -277,67 +291,7 @@ func decodeValue(src string, pos int) (ret int, v types.JsonState) { //go:nocheckptr func skipNumber(src string, pos int) (ret int) { - sp := uintptr(rt.IndexChar(src, pos)) - se := uintptr(rt.IndexChar(src, len(src))) - if uintptr(sp) >= se { - return -int(types.ERR_EOF) - } - - if c := *(*byte)(unsafe.Pointer(sp)); c == '-' { - sp += 1 - } - ss := sp - - var pointer bool - var exponent bool - var lastIsDigit bool - var nextNeedDigit = true - - for ; sp < se; sp += uintptr(1) { - c := *(*byte)(unsafe.Pointer(sp)) - if isDigit(c) { - lastIsDigit = true - nextNeedDigit = false - continue - } else if nextNeedDigit { - return -int(types.ERR_INVALID_CHAR) - } else if c == '.' 
{ - if !lastIsDigit || pointer || exponent || sp == ss { - return -int(types.ERR_INVALID_CHAR) - } - pointer = true - lastIsDigit = false - nextNeedDigit = true - continue - } else if c == 'e' || c == 'E' { - if !lastIsDigit || exponent { - return -int(types.ERR_INVALID_CHAR) - } - if sp == se-1 { - return -int(types.ERR_EOF) - } - exponent = true - lastIsDigit = false - nextNeedDigit = false - continue - } else if c == '-' || c == '+' { - if prev := *(*byte)(unsafe.Pointer(sp - 1)); prev != 'e' && prev != 'E' { - return -int(types.ERR_INVALID_CHAR) - } - lastIsDigit = false - nextNeedDigit = true - continue - } else { - break - } - } - - if nextNeedDigit { - return -int(types.ERR_EOF) - } - - runtime.KeepAlive(src) - return int(uintptr(sp) - uintptr((*rt.GoString)(unsafe.Pointer(&src)).Ptr)) + return utils.SkipNumber(src, pos) } //go:nocheckptr @@ -573,3 +527,36 @@ func skipArray(src string, pos int) (ret int, start int) { pos++ } } + +// DecodeString decodes a JSON string from pos and return golang string. +// - needEsc indicates if to unescaped escaping chars +// - hasEsc tells if the returned string has escaping chars +// - validStr enables validating UTF8 charset +// +func _DecodeString(src string, pos int, needEsc bool, validStr bool) (v string, ret int, hasEsc bool) { + p := NewParserObj(src) + p.p = pos + switch val := p.decodeValue(); val.Vt { + case types.V_STRING: + str := p.s[val.Iv : p.p-1] + if validStr && !validate_utf8(str) { + return "", -int(types.ERR_INVALID_UTF8), false + } + /* fast path: no escape sequence */ + if val.Ep == -1 { + return str, p.p, false + } else if !needEsc { + return str, p.p, true + } + /* unquote the string */ + out, err := unquote(str) + /* check for errors */ + if err != 0 { + return "", -int(err), true + } else { + return out, p.p, true + } + default: + return "", -int(_ERR_UNSUPPORT_TYPE), false + } +} diff --git a/vendor/github.com/bytedance/sonic/ast/encode.go b/vendor/github.com/bytedance/sonic/ast/encode.go index 1187e30c260832e093c1d577554b523a89255209..eae0bd258ba4c92193704edf4f7342c8a8ada8a1 100644 --- a/vendor/github.com/bytedance/sonic/ast/encode.go +++ b/vendor/github.com/bytedance/sonic/ast/encode.go @@ -17,14 +17,11 @@ package ast import ( - `sync` - `unicode/utf8` + "sync" + "unicode/utf8" - `github.com/bytedance/sonic/internal/rt` -) - -const ( - _MaxBuffer = 1024 // 1KB buffer size + "github.com/bytedance/sonic/internal/rt" + "github.com/bytedance/sonic/option" ) func quoteString(e *[]byte, s string) { @@ -32,7 +29,7 @@ func quoteString(e *[]byte, s string) { start := 0 for i := 0; i < len(s); { if b := s[i]; b < utf8.RuneSelf { - if safeSet[b] { + if rt.SafeSet[b] { i++ continue } @@ -56,8 +53,8 @@ func quoteString(e *[]byte, s string) { // user-controlled strings are rendered into JSON // and served to some browsers. *e = append(*e, `u00`...) - *e = append(*e, hex[b>>4]) - *e = append(*e, hex[b&0xF]) + *e = append(*e, rt.Hex[b>>4]) + *e = append(*e, rt.Hex[b&0xF]) } i++ start = i @@ -78,7 +75,7 @@ func quoteString(e *[]byte, s string) { *e = append(*e, s[start:i]...) } *e = append(*e, `\u202`...) 
- *e = append(*e, hex[c&0xF]) + *e = append(*e, rt.Hex[c&0xF]) i += size start = i continue @@ -94,16 +91,24 @@ func quoteString(e *[]byte, s string) { var bytesPool = sync.Pool{} func (self *Node) MarshalJSON() ([]byte, error) { + if self == nil { + return bytesNull, nil + } + buf := newBuffer() err := self.encode(buf) if err != nil { freeBuffer(buf) return nil, err } - - ret := make([]byte, len(*buf)) - copy(ret, *buf) - freeBuffer(buf) + var ret []byte + if !rt.CanSizeResue(cap(*buf)) { + ret = *buf + } else { + ret = make([]byte, len(*buf)) + copy(ret, *buf) + freeBuffer(buf) + } return ret, err } @@ -111,21 +116,24 @@ func newBuffer() *[]byte { if ret := bytesPool.Get(); ret != nil { return ret.(*[]byte) } else { - buf := make([]byte, 0, _MaxBuffer) + buf := make([]byte, 0, option.DefaultAstBufferSize) return &buf } } func freeBuffer(buf *[]byte) { + if !rt.CanSizeResue(cap(*buf)) { + return + } *buf = (*buf)[:0] bytesPool.Put(buf) } func (self *Node) encode(buf *[]byte) error { - if self.IsRaw() { + if self.isRaw() { return self.encodeRaw(buf) } - switch self.Type() { + switch int(self.itype()) { case V_NONE : return ErrNotExist case V_ERROR : return self.Check() case V_NULL : return self.encodeNull(buf) @@ -141,16 +149,21 @@ func (self *Node) encode(buf *[]byte) error { } func (self *Node) encodeRaw(buf *[]byte) error { - raw, err := self.Raw() - if err != nil { - return err + lock := self.rlock() + if !self.isRaw() { + self.runlock() + return self.encode(buf) + } + raw := self.toString() + if lock { + self.runlock() } *buf = append(*buf, raw...) return nil } func (self *Node) encodeNull(buf *[]byte) error { - *buf = append(*buf, bytesNull...) + *buf = append(*buf, strNull...) return nil } @@ -165,18 +178,18 @@ func (self *Node) encodeFalse(buf *[]byte) error { } func (self *Node) encodeNumber(buf *[]byte) error { - str := rt.StrFrom(self.p, self.v) + str := self.toString() *buf = append(*buf, str...) 
return nil } func (self *Node) encodeString(buf *[]byte) error { - if self.v == 0 { + if self.l == 0 { *buf = append(*buf, '"', '"') return nil } - quote(buf, rt.StrFrom(self.p, self.v)) + quote(buf, self.toString()) return nil } @@ -195,16 +208,17 @@ func (self *Node) encodeArray(buf *[]byte) error { *buf = append(*buf, '[') - var p = (*Node)(self.p) - err := p.encode(buf) - if err != nil { - return err - } - for i := 1; i < nb; i++ { - *buf = append(*buf, ',') - p = p.unsafe_next() - err := p.encode(buf) - if err != nil { + var started bool + for i := 0; i < nb; i++ { + n := self.nodeAt(i) + if !n.Exists() { + continue + } + if started { + *buf = append(*buf, ',') + } + started = true + if err := n.encode(buf); err != nil { return err } } @@ -240,20 +254,21 @@ func (self *Node) encodeObject(buf *[]byte) error { *buf = append(*buf, '{') - var p = (*Pair)(self.p) - err := p.encode(buf) - if err != nil { - return err - } - for i := 1; i < nb; i++ { - *buf = append(*buf, ',') - p = p.unsafe_next() - err := p.encode(buf) - if err != nil { + var started bool + for i := 0; i < nb; i++ { + n := self.pairAt(i) + if n == nil || !n.Value.Exists() { + continue + } + if started { + *buf = append(*buf, ',') + } + started = true + if err := n.encode(buf); err != nil { return err } } *buf = append(*buf, '}') return nil -} \ No newline at end of file +} diff --git a/vendor/github.com/bytedance/sonic/ast/error.go b/vendor/github.com/bytedance/sonic/ast/error.go index f4c441ae6c7d1592ef5bd3668bc298612548c1c2..3716e7a91bd0b1e3e42b08ba087e1fef0ca2ab30 100644 --- a/vendor/github.com/bytedance/sonic/ast/error.go +++ b/vendor/github.com/bytedance/sonic/ast/error.go @@ -8,23 +8,59 @@ import ( `github.com/bytedance/sonic/internal/native/types` ) -func (self *Parser) syntaxError(err types.ParsingError) SyntaxError { - return SyntaxError{ - Pos : self.p, - Src : self.s, - Code: err, + +func newError(err types.ParsingError, msg string) *Node { + return &Node{ + t: V_ERROR, + l: uint(err), + p: unsafe.Pointer(&msg), } } +func newErrorPair(err SyntaxError) *Pair { + return &Pair{0, "", *newSyntaxError(err)} +} + +// Error returns error message if the node is invalid +func (self Node) Error() string { + if self.t != V_ERROR { + return "" + } else { + return *(*string)(self.p) + } +} + func newSyntaxError(err SyntaxError) *Node { msg := err.Description() return &Node{ t: V_ERROR, - v: int64(err.Code), + l: uint(err.Code), p: unsafe.Pointer(&msg), } } +func (self *Parser) syntaxError(err types.ParsingError) SyntaxError { + return SyntaxError{ + Pos : self.p, + Src : self.s, + Code: err, + } +} + +func unwrapError(err error) *Node { + if se, ok := err.(*Node); ok { + return se + }else if sse, ok := err.(Node); ok { + return &sse + } else { + msg := err.Error() + return &Node{ + t: V_ERROR, + p: unsafe.Pointer(&msg), + } + } +} + type SyntaxError struct { Pos int Src string @@ -47,7 +83,7 @@ func (self SyntaxError) description() string { /* check for empty source */ if self.Src == "" { - return fmt.Sprintf("no sources available: %#v", self) + return fmt.Sprintf("no sources available, the input json is empty: %#v", self) } /* prevent slicing before the beginning */ diff --git a/vendor/github.com/bytedance/sonic/ast/iterator.go b/vendor/github.com/bytedance/sonic/ast/iterator.go index 03a25b4e9ff53bdf2da50e3fa56982bee4cf23b5..076647154441fb4491d2248cd66a928240ce7c71 100644 --- a/vendor/github.com/bytedance/sonic/ast/iterator.go +++ b/vendor/github.com/bytedance/sonic/ast/iterator.go @@ -17,30 +17,48 @@ package ast import ( - 
`fmt` + "fmt" - `github.com/bytedance/sonic/internal/native/types` + "github.com/bytedance/sonic/internal/caching" + "github.com/bytedance/sonic/internal/native/types" ) type Pair struct { + hash uint64 Key string Value Node } +func NewPair(key string, val Node) Pair { + return Pair{ + hash: caching.StrHash(key), + Key: key, + Value: val, + } +} + // Values returns iterator for array's children traversal func (self *Node) Values() (ListIterator, error) { - if err := self.should(types.V_ARRAY, "an array"); err != nil { + if err := self.should(types.V_ARRAY); err != nil { return ListIterator{}, err } - return ListIterator{Iterator{p: self}}, nil + return self.values(), nil +} + +func (self *Node) values() ListIterator { + return ListIterator{Iterator{p: self}} } // Properties returns iterator for object's children traversal func (self *Node) Properties() (ObjectIterator, error) { - if err := self.should(types.V_OBJECT, "an object"); err != nil { + if err := self.should(types.V_OBJECT); err != nil { return ObjectIterator{}, err } - return ObjectIterator{Iterator{p: self}}, nil + return self.properties(), nil +} + +func (self *Node) properties() ObjectIterator { + return ObjectIterator{Iterator{p: self}} } type Iterator struct { @@ -82,26 +100,54 @@ type ObjectIterator struct { Iterator } +func (self *ListIterator) next() *Node { +next_start: + if !self.HasNext() { + return nil + } else { + n := self.p.nodeAt(self.i) + self.i++ + if !n.Exists() { + goto next_start + } + return n + } +} + // Next scans through children of underlying V_ARRAY, // copies each child to v, and returns .HasNext(). func (self *ListIterator) Next(v *Node) bool { - if !self.HasNext() { + n := self.next() + if n == nil { return false + } + *v = *n + return true +} + +func (self *ObjectIterator) next() *Pair { +next_start: + if !self.HasNext() { + return nil } else { - *v, self.i = *self.p.nodeAt(self.i), self.i + 1 - return true + n := self.p.pairAt(self.i) + self.i++ + if n == nil || !n.Value.Exists() { + goto next_start + } + return n } } // Next scans through children of underlying V_OBJECT, // copies each child to v, and returns .HasNext(). func (self *ObjectIterator) Next(p *Pair) bool { - if !self.HasNext() { + n := self.next() + if n == nil { return false - } else { - *p, self.i = *self.p.pairAt(self.i), self.i + 1 - return true } + *p = *n + return true } // Sequence represents scanning path of single-layer nodes. @@ -129,36 +175,42 @@ type Scanner func(path Sequence, node *Node) bool // // Especailly, if the node is not V_ARRAY or V_OBJECT, // the node itself will be returned and Sequence.Index == -1. 
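+//
+// A hedged usage sketch (not from upstream; assumes the Sequence fields
+// Index and Key used by the literals in this file):
+//
+//	root.ForEach(func(path Sequence, node *Node) bool {
+//		if path.Key != nil {
+//			_ = *path.Key // object member name
+//		}
+//		return true // returning false stops the scan early
+//	})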
+// +// NOTICE: A unsetted node WON'T trigger sc, but its index still counts into Path.Index func (self *Node) ForEach(sc Scanner) error { + if err := self.checkRaw(); err != nil { + return err + } switch self.itype() { case types.V_ARRAY: - ns, err := self.UnsafeArray() + iter, err := self.Values() if err != nil { return err } - for i := range ns { - if !sc(Sequence{i, nil}, &ns[i]) { - return err + v := iter.next() + for v != nil { + if !sc(Sequence{iter.i-1, nil}, v) { + return nil } + v = iter.next() } case types.V_OBJECT: - ns, err := self.UnsafeMap() + iter, err := self.Properties() if err != nil { return err } - for i := range ns { - if !sc(Sequence{i, &ns[i].Key}, &ns[i].Value) { - return err + v := iter.next() + for v != nil { + if !sc(Sequence{iter.i-1, &v.Key}, &v.Value) { + return nil } + v = iter.next() } default: + if self.Check() != nil { + return self + } sc(Sequence{-1, nil}, self) } - return self.Check() + return nil } - -type PairSlice []Pair - -func (self PairSlice) Sort() { - radixQsort(self, 0, maxDepth(len(self))) -} \ No newline at end of file diff --git a/vendor/github.com/bytedance/sonic/ast/node.go b/vendor/github.com/bytedance/sonic/ast/node.go index 6b5ad8a3ed87f982ecc40aa0125c288cc9e1e1e9..0fbcf7835c87f98b306fd63afed3a15db01cfbce 100644 --- a/vendor/github.com/bytedance/sonic/ast/node.go +++ b/vendor/github.com/bytedance/sonic/ast/node.go @@ -17,22 +17,15 @@ package ast import ( - `encoding/json` - `fmt` - `strconv` - `unsafe` - `reflect` - - `github.com/bytedance/sonic/internal/native/types` - `github.com/bytedance/sonic/internal/rt` -) - -const ( - _CAP_BITS = 32 - _LEN_MASK = 1 << _CAP_BITS - 1 - - _NODE_SIZE = unsafe.Sizeof(Node{}) - _PAIR_SIZE = unsafe.Sizeof(Pair{}) + "encoding/json" + "fmt" + "strconv" + "sync" + "sync/atomic" + "unsafe" + + "github.com/bytedance/sonic/internal/native/types" + "github.com/bytedance/sonic/internal/rt" ) const ( @@ -45,46 +38,43 @@ const ( _V_ARRAY_LAZY = _V_LAZY | types.V_ARRAY _V_OBJECT_LAZY = _V_LAZY | types.V_OBJECT _MASK_LAZY = _V_LAZY - 1 - _MASK_RAW = _V_RAW - 1 + _MASK_RAW = _V_RAW - 1 ) const ( V_NONE = 0 V_ERROR = 1 - V_NULL = 2 - V_TRUE = 3 - V_FALSE = 4 - V_ARRAY = 5 - V_OBJECT = 6 - V_STRING = 7 + V_NULL = int(types.V_NULL) + V_TRUE = int(types.V_TRUE) + V_FALSE = int(types.V_FALSE) + V_ARRAY = int(types.V_ARRAY) + V_OBJECT = int(types.V_OBJECT) + V_STRING = int(types.V_STRING) V_NUMBER = int(_V_NUMBER) V_ANY = int(_V_ANY) ) -var ( - byteType = rt.UnpackType(reflect.TypeOf(byte(0))) -) - type Node struct { - v int64 t types.ValueType + l uint p unsafe.Pointer + m *sync.RWMutex } // UnmarshalJSON is just an adapter to json.Unmarshaler. // If you want better performance, use Searcher.GetByPath() directly func (self *Node) UnmarshalJSON(data []byte) (err error) { - *self, err = NewSearcher(string(data)).GetByPath() - return + *self = NewRaw(string(data)) + return self.Check() } /** Node Type Accessor **/ // Type returns json type represented by the node // It will be one of belows: -// V_NONE = 0 (empty node) +// V_NONE = 0 (empty node, key not exists) // V_ERROR = 1 (error node) -// V_NULL = 2 (json value `null`) +// V_NULL = 2 (json value `null`, key exists) // V_TRUE = 3 (json value `true`) // V_FALSE = 4 (json value `false`) // V_ARRAY = 5 (json value array) @@ -92,17 +82,39 @@ func (self *Node) UnmarshalJSON(data []byte) (err error) { // V_STRING = 7 (json value string) // V_NUMBER = 33 (json value number ) // V_ANY = 34 (golang interface{}) +// +// Deprecated: not concurrent safe. 
Use TypeSafe instead func (self Node) Type() int { return int(self.t & _MASK_LAZY & _MASK_RAW) } -func (self Node) itype() types.ValueType { +// Type concurrently-safe returns json type represented by the node +// It will be one of belows: +// V_NONE = 0 (empty node, key not exists) +// V_ERROR = 1 (error node) +// V_NULL = 2 (json value `null`, key exists) +// V_TRUE = 3 (json value `true`) +// V_FALSE = 4 (json value `false`) +// V_ARRAY = 5 (json value array) +// V_OBJECT = 6 (json value object) +// V_STRING = 7 (json value string) +// V_NUMBER = 33 (json value number ) +// V_ANY = 34 (golang interface{}) +func (self *Node) TypeSafe() int { + return int(self.loadt() & _MASK_LAZY & _MASK_RAW) +} + +func (self *Node) itype() types.ValueType { return self.t & _MASK_LAZY & _MASK_RAW } // Exists returns false only if the self is nil or empty node V_NONE func (self *Node) Exists() bool { - return self != nil && self.t != _V_NONE + if self == nil { + return false + } + t := self.loadt() + return t != V_ERROR && t != _V_NONE } // Valid reports if self is NOT V_ERROR or nil @@ -110,63 +122,72 @@ func (self *Node) Valid() bool { if self == nil { return false } - return self.t != V_ERROR + return self.loadt() != V_ERROR } // Check checks if the node itself is valid, and return: -// - ErrNotFound If the node is nil +// - ErrNotExist If the node is nil // - Its underlying error If the node is V_ERROR func (self *Node) Check() error { if self == nil { return ErrNotExist - } else if self.t != V_ERROR { + } else if self.loadt() != V_ERROR { return nil } else { return self } } -// Error returns error message if the node is invalid -func (self Node) Error() string { - if self.t != V_ERROR { - return "" - } else { - return *(*string)(self.p) - } +// isRaw returns true if node's underlying value is raw json +// +// Deprecated: not concurent safe +func (self Node) IsRaw() bool { + return self.t & _V_RAW != 0 } // IsRaw returns true if node's underlying value is raw json -func (self Node) IsRaw() bool { - return self.t&_V_RAW != 0 +func (self *Node) isRaw() bool { + return self.loadt() & _V_RAW != 0 } func (self *Node) isLazy() bool { - return self != nil && self.t&_V_LAZY != 0 + return self != nil && self.t & _V_LAZY != 0 } func (self *Node) isAny() bool { - return self != nil && self.t == _V_ANY + return self != nil && self.loadt() == _V_ANY } /** Simple Value Methods **/ // Raw returns json representation of the node, func (self *Node) Raw() (string, error) { - if !self.IsRaw() { + if self == nil { + return "", ErrNotExist + } + lock := self.rlock() + if !self.isRaw() { + if lock { + self.runlock() + } buf, err := self.MarshalJSON() return rt.Mem2Str(buf), err } - return rt.StrFrom(self.p, self.v), nil + ret := self.toString() + if lock { + self.runlock() + } + return ret, nil } func (self *Node) checkRaw() error { if err := self.Check(); err != nil { return err } - if self.IsRaw() { + if self.isRaw() { self.parseRaw(false) } - return nil + return self.Check() } // Bool returns bool value represented by this node, @@ -181,14 +202,14 @@ func (self *Node) Bool() (bool, error) { case types.V_FALSE : return false, nil case types.V_NULL : return false, nil case _V_NUMBER : - if i, err := numberToInt64(self); err == nil { + if i, err := self.toInt64(); err == nil { return i != 0, nil - } else if f, err := numberToFloat64(self); err == nil { + } else if f, err := self.toFloat64(); err == nil { return f != 0, nil } else { return false, err } - case types.V_STRING: return strconv.ParseBool(rt.StrFrom(self.p, 
self.v)) + case types.V_STRING: return strconv.ParseBool(self.toString()) case _V_ANY : any := self.packAny() switch v := any.(type) { @@ -229,9 +250,9 @@ func (self *Node) Int64() (int64, error) { } switch self.t { case _V_NUMBER, types.V_STRING : - if i, err := numberToInt64(self); err == nil { + if i, err := self.toInt64(); err == nil { return i, nil - } else if f, err := numberToFloat64(self); err == nil { + } else if f, err := self.toFloat64(); err == nil { return int64(f), nil } else { return 0, err @@ -283,7 +304,7 @@ func (self *Node) StrictInt64() (int64, error) { return 0, err } switch self.t { - case _V_NUMBER : return numberToInt64(self) + case _V_NUMBER : return self.toInt64() case _V_ANY : any := self.packAny() switch v := any.(type) { @@ -325,12 +346,12 @@ func (self *Node) Number() (json.Number, error) { return json.Number(""), err } switch self.t { - case _V_NUMBER : return toNumber(self) , nil + case _V_NUMBER : return self.toNumber(), nil case types.V_STRING : - if _, err := numberToInt64(self); err == nil { - return toNumber(self), nil - } else if _, err := numberToFloat64(self); err == nil { - return toNumber(self), nil + if _, err := self.toInt64(); err == nil { + return self.toNumber(), nil + } else if _, err := self.toFloat64(); err == nil { + return self.toNumber(), nil } else { return json.Number(""), err } @@ -372,7 +393,7 @@ func (self *Node) StrictNumber() (json.Number, error) { return json.Number(""), err } switch self.t { - case _V_NUMBER : return toNumber(self) , nil + case _V_NUMBER : return self.toNumber() , nil case _V_ANY : if v, ok := self.packAny().(json.Number); ok { return v, nil @@ -394,7 +415,7 @@ func (self *Node) String() (string, error) { case types.V_NULL : return "" , nil case types.V_TRUE : return "true" , nil case types.V_FALSE : return "false", nil - case types.V_STRING, _V_NUMBER : return rt.StrFrom(self.p, self.v), nil + case types.V_STRING, _V_NUMBER : return self.toString(), nil case _V_ANY : any := self.packAny() switch v := any.(type) { @@ -426,7 +447,7 @@ func (self *Node) StrictString() (string, error) { return "", err } switch self.t { - case types.V_STRING : return rt.StrFrom(self.p, self.v), nil + case types.V_STRING : return self.toString(), nil case _V_ANY : if v, ok := self.packAny().(string); ok { return v, nil @@ -445,7 +466,7 @@ func (self *Node) Float64() (float64, error) { return 0.0, err } switch self.t { - case _V_NUMBER, types.V_STRING : return numberToFloat64(self) + case _V_NUMBER, types.V_STRING : return self.toFloat64() case types.V_TRUE : return 1.0, nil case types.V_FALSE : return 0.0, nil case types.V_NULL : return 0.0, nil @@ -494,7 +515,7 @@ func (self *Node) StrictFloat64() (float64, error) { return 0.0, err } switch self.t { - case _V_NUMBER : return numberToFloat64(self) + case _V_NUMBER : return self.toFloat64() case _V_ANY : any := self.packAny() switch v := any.(type) { @@ -509,15 +530,13 @@ func (self *Node) StrictFloat64() (float64, error) { /** Sequencial Value Methods **/ // Len returns children count of a array|object|string node -// For partially loaded node, it also works but only counts the parsed children +// WARN: For partially loaded node, it also works but only counts the parsed children func (self *Node) Len() (int, error) { if err := self.checkRaw(); err != nil { return 0, err } - if self.t == types.V_ARRAY || self.t == types.V_OBJECT || self.t == _V_ARRAY_LAZY || self.t == _V_OBJECT_LAZY { - return int(self.v & _LEN_MASK), nil - } else if self.t == types.V_STRING { - return int(self.v), 
nil
+ if self.t == types.V_ARRAY || self.t == types.V_OBJECT || self.t == _V_ARRAY_LAZY || self.t == _V_OBJECT_LAZY || self.t == types.V_STRING {
+ return int(self.l), nil
} else if self.t == _V_NONE || self.t == types.V_NULL {
return 0, nil
} else {
@@ -525,8 +544,8 @@ func (self *Node) Len() (int, error) {
}
}
-func (self Node) len() int {
- return int(self.v & _LEN_MASK)
+func (self *Node) len() int {
+ return int(self.l)
}
// Cap returns malloc capacity of a array|object node for children
@@ -534,47 +553,44 @@ func (self *Node) Cap() (int, error) {
if err := self.checkRaw(); err != nil {
return 0, err
}
- if self.t == types.V_ARRAY || self.t == types.V_OBJECT || self.t == _V_ARRAY_LAZY || self.t == _V_OBJECT_LAZY {
- return int(self.v >> _CAP_BITS), nil
- } else if self.t == _V_NONE || self.t == types.V_NULL {
- return 0, nil
- } else {
- return 0, ErrUnsupportType
+ switch self.t {
+ case types.V_ARRAY: return (*linkedNodes)(self.p).Cap(), nil
+ case types.V_OBJECT: return (*linkedPairs)(self.p).Cap(), nil
+ case _V_ARRAY_LAZY: return (*parseArrayStack)(self.p).v.Cap(), nil
+ case _V_OBJECT_LAZY: return (*parseObjectStack)(self.p).v.Cap(), nil
+ case _V_NONE, types.V_NULL: return 0, nil
+ default: return 0, ErrUnsupportType
}
}
-func (self Node) cap() int {
- return int(self.v >> _CAP_BITS)
-}
-
// Set sets the node of given key under self, and reports if the key has existed.
//
// If self is V_NONE or V_NULL, it becomes V_OBJECT and sets the node at the key.
func (self *Node) Set(key string, node Node) (bool, error) {
- if self != nil && (self.t == _V_NONE || self.t == types.V_NULL) {
- *self = NewObject([]Pair{{key, node}})
- return false, nil
+ if err := self.checkRaw(); err != nil {
+ return false, err
}
-
if err := node.Check(); err != nil {
return false, err
}
+
+ if self.t == _V_NONE || self.t == types.V_NULL {
+ *self = NewObject([]Pair{NewPair(key, node)})
+ return false, nil
+ } else if self.itype() != types.V_OBJECT {
+ return false, ErrUnsupportType
+ }
p := self.Get(key)
+
if !p.Exists() {
- l := self.len()
- c := self.cap()
- if l == c {
- // TODO: maybe change append size in future
- c += _DEFAULT_NODE_CAP
- mem := unsafe_NewArray(_PAIR_TYPE, c)
- memmove(mem, self.p, _PAIR_SIZE * uintptr(l))
- self.p = mem
+ // self must be fully-loaded here
+ if self.len() == 0 {
+ *self = newObject(new(linkedPairs))
}
- v := self.pairAt(l)
- v.Key = key
- v.Value = node
- self.setCapAndLen(c, l+1)
+ s := (*linkedPairs)(self.p)
+ s.Push(NewPair(key, node))
+ self.l++
return false, nil
} else if err := p.Check(); err != nil {
@@ -590,17 +606,22 @@ func (self *Node) SetAny(key string, val interface{}) (bool, error) {
return self.Set(key, NewAny(val))
}
-// Unset remove the node of given key under object parent, and reports if the key has existed.
+// Unset removes (softly) the node of given key under object parent, and reports if the key has existed.
func (self *Node) Unset(key string) (bool, error) {
- self.must(types.V_OBJECT, "an object")
+ if err := self.should(types.V_OBJECT); err != nil {
+ return false, err
+ }
+ // NOTICE: must get accurate length before deducting
+ if err := self.skipAllKey(); err != nil {
+ return false, err
+ }
p, i := self.skipKey(key)
if !p.Exists() {
return false, nil
} else if err := p.Check(); err != nil {
return false, err
}
-
- self.removePair(i)
+ self.removePairAt(i)
return true, nil
}
@@ -608,10 +629,18 @@ func (self *Node) Unset(key string) (bool, error) {
//
// The index must be within self's children.
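+//
+// A hedged usage sketch (not from upstream; assumes a fully-parsed array node
+// and the NewArray/NewNumber/NewString constructors defined later in this file):
+//
+//	root := NewArray([]Node{NewNumber("1"), NewNumber("2")})
+//	exist, err := root.SetByIndex(1, NewString("two"))
+//	// exist reports whether index 1 already held a value; err is nil here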
func (self *Node) SetByIndex(index int, node Node) (bool, error) {
+ if err := self.checkRaw(); err != nil {
+ return false, err
+ }
if err := node.Check(); err != nil {
return false, err
}
+ if index == 0 && (self.t == _V_NONE || self.t == types.V_NULL) {
+ *self = NewArray([]Node{node})
+ return false, nil
+ }
+
p := self.Index(index)
if !p.Exists() {
return false, ErrNotExist
@@ -628,14 +657,28 @@ func (self *Node) SetAnyByIndex(index int, val interface{}) (bool, error) {
return self.SetByIndex(index, NewAny(val))
}
-// UnsetByIndex remove the node of given index
+// UnsetByIndex removes (softly) the node of given index.
+//
+// WARN: this will change address of elements, which is a dangerous action.
+// Use Unset() for object or Pop() for array instead.
func (self *Node) UnsetByIndex(index int) (bool, error) {
+ if err := self.checkRaw(); err != nil {
+ return false, err
+ }
+
var p *Node
it := self.itype()
+
if it == types.V_ARRAY {
- p = self.Index(index)
- }else if it == types.V_OBJECT {
- pr := self.skipIndexPair(index)
+ if err := self.skipAllIndex(); err != nil {
+ return false, err
+ }
+ p = self.nodeAt(index)
+ } else if it == types.V_OBJECT {
+ if err := self.skipAllKey(); err != nil {
+ return false, err
+ }
+ pr := self.pairAt(index)
if pr == nil {
return false, ErrNotExist
}
@@ -648,6 +691,12 @@ func (self *Node) UnsetByIndex(index int) (bool, error) {
return false, ErrNotExist
}
+ // last elem
+ if index == self.len() - 1 {
+ return true, self.Pop()
+ }
+
+ // not last elem, self.len() changes but the linked-chunk does not
if it == types.V_ARRAY {
self.removeNode(index)
}else if it == types.V_OBJECT {
@@ -660,28 +709,110 @@
//
// If self is V_NONE or V_NULL, it becomes V_ARRAY and sets the node at index 0.
func (self *Node) Add(node Node) error {
+ if err := self.checkRaw(); err != nil {
+ return err
+ }
+
if self != nil && (self.t == _V_NONE || self.t == types.V_NULL) {
*self = NewArray([]Node{node})
return nil
}
+ if err := self.should(types.V_ARRAY); err != nil {
+ return err
+ }
- if err := self.should(types.V_ARRAY, "an array"); err != nil {
+ s, err := self.unsafeArray()
+ if err != nil {
return err
}
- if err := self.skipAllIndex(); err != nil {
+
+ // Notice: array won't have unset nodes in its tail
+ s.Push(node)
+ self.l++
+ return nil
+}
+
+// Pop removes the last child of the V_ARRAY or V_OBJECT node.
+func (self *Node) Pop() error {
+ if err := self.checkRaw(); err != nil {
return err
}
- var p rt.GoSlice
- p.Cap = self.cap()
- p.Len = self.len()
- p.Ptr = self.p
+ if it := self.itype(); it == types.V_ARRAY {
+ s, err := self.unsafeArray()
+ if err != nil {
+ return err
+ }
+ // remove tail unset nodes
+ for i := s.Len()-1; i >= 0; i-- {
+ if s.At(i).Exists() {
+ s.Pop()
+ self.l--
+ break
+ }
+ s.Pop()
+ }
+
+ } else if it == types.V_OBJECT {
+ s, err := self.unsafeMap()
+ if err != nil {
+ return err
+ }
+ // remove tail unset nodes
+ for i := s.Len()-1; i >= 0; i-- {
+ if p := s.At(i); p != nil && p.Value.Exists() {
+ s.Pop()
+ self.l--
+ break
+ }
+ s.Pop()
+ }
- s := *(*[]Node)(unsafe.Pointer(&p))
- s = append(s, node)
+ } else {
+ return ErrUnsupportType
+ }
- self.p = unsafe.Pointer(&s[0])
- self.setCapAndLen(cap(s), len(s))
+ return nil
+}
+
+// Move moves the child at src index to dst index,
+// meanwhile slides siblings from src+1 to dst.
+//
+// WARN: this will change address of elements, which is a dangerous action.
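+//
+// Hedged illustration (not from upstream): for an array [a, b, c],
+// Move(0, 2) is expected to move c to the front, yielding [c, a, b].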
+func (self *Node) Move(dst, src int) error { + if err := self.should(types.V_ARRAY); err != nil { + return err + } + + s, err := self.unsafeArray() + if err != nil { + return err + } + + // check if any unset node exists + if l := s.Len(); self.len() != l { + di, si := dst, src + // find real pos of src and dst + for i := 0; i < l; i++ { + if s.At(i).Exists() { + di-- + si-- + } + if di == -1 { + dst = i + di-- + } + if si == -1 { + src = i + si-- + } + if di == -2 && si == -2 { + break + } + } + } + + s.MoveOne(src, dst) return nil } @@ -721,7 +852,7 @@ func (self *Node) GetByPath(path ...interface{}) *Node { // Get loads given key of an object node on demands func (self *Node) Get(key string) *Node { - if err := self.should(types.V_OBJECT, "an object"); err != nil { + if err := self.should(types.V_OBJECT); err != nil { return unwrapError(err) } n, _ := self.skipKey(key) @@ -754,25 +885,36 @@ func (self *Node) Index(idx int) *Node { // IndexPair indexies pair at given idx, // node type MUST be either V_OBJECT func (self *Node) IndexPair(idx int) *Pair { - if err := self.should(types.V_OBJECT, "an object"); err != nil { + if err := self.should(types.V_OBJECT); err != nil { return nil } return self.skipIndexPair(idx) } -// IndexOrGet firstly use idx to index a value and check if its key matches -// If not, then use the key to search value -func (self *Node) IndexOrGet(idx int, key string) *Node { - if err := self.should(types.V_OBJECT, "an object"); err != nil { - return unwrapError(err) +func (self *Node) indexOrGet(idx int, key string) (*Node, int) { + if err := self.should(types.V_OBJECT); err != nil { + return unwrapError(err), idx } pr := self.skipIndexPair(idx) if pr != nil && pr.Key == key { - return &pr.Value + return &pr.Value, idx } - n, _ := self.skipKey(key) - return n + + return self.skipKey(key) +} + +// IndexOrGet firstly use idx to index a value and check if its key matches +// If not, then use the key to search value +func (self *Node) IndexOrGet(idx int, key string) *Node { + node, _ := self.indexOrGet(idx, key) + return node +} + +// IndexOrGetWithIdx attempts to retrieve a node by index and key, returning the node and its correct index. +// If the key does not match at the given index, it searches by key and returns the node with its updated index. 
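+//
+// Hedged example (not from upstream): for {"a":1,"b":2}, IndexOrGetWithIdx(0, "b")
+// should return the node of key "b" together with its corrected index 1.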
+func (self *Node) IndexOrGetWithIdx(idx int, key string) (*Node, int) { + return self.indexOrGet(idx, key) } /** Generic Value Converters **/ @@ -787,10 +929,10 @@ func (self *Node) Map() (map[string]interface{}, error) { return nil, ErrUnsupportType } } - if err := self.should(types.V_OBJECT, "an object"); err != nil { + if err := self.should(types.V_OBJECT); err != nil { return nil, err } - if err := self.loadAllKey(); err != nil { + if err := self.loadAllKey(false); err != nil { return nil, err } return self.toGenericObject() @@ -806,10 +948,10 @@ func (self *Node) MapUseNumber() (map[string]interface{}, error) { return nil, ErrUnsupportType } } - if err := self.should(types.V_OBJECT, "an object"); err != nil { + if err := self.should(types.V_OBJECT); err != nil { return nil, err } - if err := self.loadAllKey(); err != nil { + if err := self.loadAllKey(false); err != nil { return nil, err } return self.toGenericObjectUseNumber() @@ -826,7 +968,7 @@ func (self *Node) MapUseNode() (map[string]Node, error) { return nil, ErrUnsupportType } } - if err := self.should(types.V_OBJECT, "an object"); err != nil { + if err := self.should(types.V_OBJECT); err != nil { return nil, err } if err := self.skipAllKey(); err != nil { @@ -837,30 +979,74 @@ func (self *Node) MapUseNode() (map[string]Node, error) { // MapUnsafe exports the underlying pointer to its children map // WARN: don't use it unless you know what you are doing -func (self *Node) UnsafeMap() ([]Pair, error) { - if err := self.should(types.V_OBJECT, "an object"); err != nil { - return nil, err - } +// +// Deprecated: this API now returns copied nodes instead of directly reference, +// func (self *Node) UnsafeMap() ([]Pair, error) { +// if err := self.should(types.V_OBJECT, "an object"); err != nil { +// return nil, err +// } +// if err := self.skipAllKey(); err != nil { +// return nil, err +// } +// return self.toGenericObjectUsePair() +// } + +//go:nocheckptr +func (self *Node) unsafeMap() (*linkedPairs, error) { if err := self.skipAllKey(); err != nil { return nil, err } - s := rt.Ptr2SlicePtr(self.p, int(self.len()), self.cap()) - return *(*[]Pair)(s), nil + if self.p == nil { + *self = newObject(new(linkedPairs)) + } + return (*linkedPairs)(self.p), nil } // SortKeys sorts children of a V_OBJECT node in ascending key-order. // If recurse is true, it recursively sorts children's children as long as a V_OBJECT node is found. 
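+//
+// A hedged usage sketch (not from upstream; NewRaw is defined in this file):
+//
+//	root := NewRaw(`{"b":1,"a":{"d":4,"c":3}}`)
+//	_ = root.SortKeys(true) // keys become a,b at the top level and c,d in the child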
-func (self *Node) SortKeys(recurse bool) (err error) { - ps, err := self.UnsafeMap() +func (self *Node) SortKeys(recurse bool) error { + // check raw node first + if err := self.checkRaw(); err != nil { + return err + } + if self.itype() == types.V_OBJECT { + return self.sortKeys(recurse) + } else if self.itype() == types.V_ARRAY { + var err error + err2 := self.ForEach(func(path Sequence, node *Node) bool { + it := node.itype() + if it == types.V_ARRAY || it == types.V_OBJECT { + err = node.SortKeys(recurse) + if err != nil { + return false + } + } + return true + }) + if err != nil { + return err + } + return err2 + } else { + return nil + } +} + +func (self *Node) sortKeys(recurse bool) (err error) { + // check raw node first + if err := self.checkRaw(); err != nil { + return err + } + ps, err := self.unsafeMap() if err != nil { return err } - PairSlice(ps).Sort() + ps.Sort() if recurse { var sc Scanner sc = func(path Sequence, node *Node) bool { if node.itype() == types.V_OBJECT { - if err := node.SortKeys(recurse); err != nil { + if err := node.sortKeys(recurse); err != nil { return false } } @@ -871,7 +1057,9 @@ func (self *Node) SortKeys(recurse bool) (err error) { } return true } - self.ForEach(sc) + if err := self.ForEach(sc); err != nil { + return err + } } return nil } @@ -886,10 +1074,10 @@ func (self *Node) Array() ([]interface{}, error) { return nil, ErrUnsupportType } } - if err := self.should(types.V_ARRAY, "an array"); err != nil { + if err := self.should(types.V_ARRAY); err != nil { return nil, err } - if err := self.loadAllIndex(); err != nil { + if err := self.loadAllIndex(false); err != nil { return nil, err } return self.toGenericArray() @@ -905,10 +1093,10 @@ func (self *Node) ArrayUseNumber() ([]interface{}, error) { return nil, ErrUnsupportType } } - if err := self.should(types.V_ARRAY, "an array"); err != nil { + if err := self.should(types.V_ARRAY); err != nil { return nil, err } - if err := self.loadAllIndex(); err != nil { + if err := self.loadAllIndex(false); err != nil { return nil, err } return self.toGenericArrayUseNumber() @@ -925,7 +1113,7 @@ func (self *Node) ArrayUseNode() ([]Node, error) { return nil, ErrUnsupportType } } - if err := self.should(types.V_ARRAY, "an array"); err != nil { + if err := self.should(types.V_ARRAY); err != nil { return nil, err } if err := self.skipAllIndex(); err != nil { @@ -936,15 +1124,27 @@ func (self *Node) ArrayUseNode() ([]Node, error) { // ArrayUnsafe exports the underlying pointer to its children array // WARN: don't use it unless you know what you are doing -func (self *Node) UnsafeArray() ([]Node, error) { - if err := self.should(types.V_ARRAY, "an array"); err != nil { - return nil, err - } +// +// Deprecated: this API now returns copied nodes instead of directly reference, +// which has no difference with ArrayUseNode +// func (self *Node) UnsafeArray() ([]Node, error) { +// if err := self.should(types.V_ARRAY, "an array"); err != nil { +// return nil, err +// } +// if err := self.skipAllIndex(); err != nil { +// return nil, err +// } +// return self.toGenericArrayUseNode() +// } + +func (self *Node) unsafeArray() (*linkedNodes, error) { if err := self.skipAllIndex(); err != nil { return nil, err } - s := rt.Ptr2SlicePtr(self.p, self.len(), self.cap()) - return *(*[]Node)(s), nil + if self.p == nil { + *self = newArray(new(linkedNodes)) + } + return (*linkedNodes)(self.p), nil } // Interface loads all children under all pathes from this node, @@ -961,20 +1161,20 @@ func (self *Node) Interface() (interface{}, 
error) { case types.V_FALSE : return false, nil case types.V_ARRAY : return self.toGenericArray() case types.V_OBJECT : return self.toGenericObject() - case types.V_STRING : return rt.StrFrom(self.p, self.v), nil + case types.V_STRING : return self.toString(), nil case _V_NUMBER : - v, err := numberToFloat64(self) + v, err := self.toFloat64() if err != nil { return nil, err } return v, nil case _V_ARRAY_LAZY : - if err := self.loadAllIndex(); err != nil { + if err := self.loadAllIndex(false); err != nil { return nil, err } return self.toGenericArray() case _V_OBJECT_LAZY : - if err := self.loadAllKey(); err != nil { + if err := self.loadAllKey(false); err != nil { return nil, err } return self.toGenericObject() @@ -1005,15 +1205,15 @@ func (self *Node) InterfaceUseNumber() (interface{}, error) { case types.V_FALSE : return false, nil case types.V_ARRAY : return self.toGenericArrayUseNumber() case types.V_OBJECT : return self.toGenericObjectUseNumber() - case types.V_STRING : return rt.StrFrom(self.p, self.v), nil - case _V_NUMBER : return toNumber(self), nil + case types.V_STRING : return self.toString(), nil + case _V_NUMBER : return self.toNumber(), nil case _V_ARRAY_LAZY : - if err := self.loadAllIndex(); err != nil { + if err := self.loadAllIndex(false); err != nil { return nil, err } return self.toGenericArrayUseNumber() case _V_OBJECT_LAZY : - if err := self.loadAllKey(); err != nil { + if err := self.loadAllKey(false); err != nil { return nil, err } return self.toGenericObjectUseNumber() @@ -1045,104 +1245,30 @@ func (self *Node) InterfaceUseNode() (interface{}, error) { } } -// LoadAll loads all the node's children and children's children as parsed. -// After calling it, the node can be safely used on concurrency +// LoadAll loads the node's children +// and ensure all its children can be READ concurrently (include its children's children) func (self *Node) LoadAll() error { - if self.IsRaw() { - self.parseRaw(true) - return self.Check() - } - - switch self.itype() { - case types.V_ARRAY: - e := self.len() - if err := self.loadAllIndex(); err != nil { - return err - } - for i := 0; i < e; i++ { - n := self.nodeAt(i) - if n.IsRaw() { - n.parseRaw(true) - } - if err := n.Check(); err != nil { - return err - } - } - return nil - case types.V_OBJECT: - e := self.len() - if err := self.loadAllKey(); err != nil { - return err - } - for i := 0; i < e; i++ { - n := self.pairAt(i) - if n.Value.IsRaw() { - n.Value.parseRaw(true) - } - if err := n.Value.Check(); err != nil { - return err - } - } - return nil - default: - return self.Check() - } + return self.Load() } // Load loads the node's children as parsed. 
-// After calling it, only the node itself can be used on concurrency (not include its children) +// and ensure all its children can be READ concurrently (include its children's children) func (self *Node) Load() error { - if self.IsRaw() { - self.parseRaw(false) - return self.Load() - } - switch self.t { - case _V_ARRAY_LAZY: - return self.skipAllIndex() - case _V_OBJECT_LAZY: - return self.skipAllKey() - default: - return self.Check() + case _V_ARRAY_LAZY: self.loadAllIndex(true) + case _V_OBJECT_LAZY: self.loadAllKey(true) + case V_ERROR: return self + case V_NONE: return nil } -} - -/**---------------------------------- Internal Helper Methods ----------------------------------**/ - -var ( - _NODE_TYPE = rt.UnpackEface(Node{}).Type - _PAIR_TYPE = rt.UnpackEface(Pair{}).Type -) - -func (self *Node) setCapAndLen(cap int, len int) { - if self.t == types.V_ARRAY || self.t == types.V_OBJECT || self.t == _V_ARRAY_LAZY || self.t == _V_OBJECT_LAZY { - self.v = int64(len&_LEN_MASK | cap<<_CAP_BITS) - } else { - panic("value does not have a length") + if self.m == nil { + self.m = new(sync.RWMutex) } + return self.checkRaw() } -func (self *Node) unsafe_next() *Node { - return (*Node)(unsafe.Pointer(uintptr(unsafe.Pointer(self)) + _NODE_SIZE)) -} - -func (self *Pair) unsafe_next() *Pair { - return (*Pair)(unsafe.Pointer(uintptr(unsafe.Pointer(self)) + _PAIR_SIZE)) -} - -func (self *Node) must(t types.ValueType, s string) { - if err := self.checkRaw(); err != nil { - panic(err) - } - if err := self.Check(); err != nil { - panic(err) - } - if self.itype() != t { - panic("value cannot be represented as " + s) - } -} +/**---------------------------------- Internal Helper Methods ----------------------------------**/ -func (self *Node) should(t types.ValueType, s string) error { +func (self *Node) should(t types.ValueType) error { if err := self.checkRaw(); err != nil { return err } @@ -1153,37 +1279,51 @@ func (self *Node) should(t types.ValueType, s string) error { } func (self *Node) nodeAt(i int) *Node { - var p = self.p + var p *linkedNodes if self.isLazy() { _, stack := self.getParserAndArrayStack() - p = *(*unsafe.Pointer)(unsafe.Pointer(&stack.v)) + p = &stack.v + } else { + p = (*linkedNodes)(self.p) + if l := p.Len(); l != self.len() { + // some nodes got unset, iterate to skip them + for j:=0; j 0 { /* linear search */ var p *Pair + var i int if lazy { s := (*parseObjectStack)(self.p) - p = &s.v[0] + p, i = s.v.Get(key) } else { - p = (*Pair)(self.p) + p, i = (*linkedPairs)(self.p).Get(key) } - if p.Key == key { - return &p.Value, 0 - } - for i := 1; i < nb; i++ { - p = p.unsafe_next() - if p.Key == key { - return &p.Value, i - } + if p != nil { + return &p.Value, i } } @@ -1304,28 +1439,37 @@ func (self *Node) skipIndexPair(index int) *Pair { return nil } -func (self *Node) loadAllIndex() error { +func (self *Node) loadAllIndex(loadOnce bool) error { if !self.isLazy() { return nil } var err types.ParsingError parser, stack := self.getParserAndArrayStack() - parser.noLazy = true - *self, err = parser.decodeArray(stack.v) + if !loadOnce { + parser.noLazy = true + } else { + parser.loadOnce = true + } + *self, err = parser.decodeArray(&stack.v) if err != 0 { return parser.ExportError(err) } return nil } -func (self *Node) loadAllKey() error { +func (self *Node) loadAllKey(loadOnce bool) error { if !self.isLazy() { return nil } var err types.ParsingError parser, stack := self.getParserAndObjectStack() - parser.noLazy = true - *self, err = parser.decodeObject(stack.v) + if !loadOnce { + 
parser.noLazy = true + *self, err = parser.decodeObject(&stack.v) + } else { + parser.loadOnce = true + *self, err = parser.decodeObject(&stack.v) + } if err != 0 { return parser.ExportError(err) } @@ -1333,63 +1477,50 @@ func (self *Node) loadAllKey() error { } func (self *Node) removeNode(i int) { - nb := self.len() - 1 node := self.nodeAt(i) - if i == nb { - self.setCapAndLen(self.cap(), nb) - *node = Node{} + if node == nil { return } - - from := self.nodeAt(i + 1) - memmove(unsafe.Pointer(node), unsafe.Pointer(from), _NODE_SIZE * uintptr(nb - i)) - - last := self.nodeAt(nb) - *last = Node{} - - self.setCapAndLen(self.cap(), nb) + *node = Node{} + // NOTICE: not be consistent with linkedNode.Len() + self.l-- } func (self *Node) removePair(i int) { - nb := self.len() - 1 - node := self.pairAt(i) - if i == nb { - self.setCapAndLen(self.cap(), nb) - *node = Pair{} + last := self.pairAt(i) + if last == nil { return } - - from := self.pairAt(i + 1) - memmove(unsafe.Pointer(node), unsafe.Pointer(from), _PAIR_SIZE * uintptr(nb - i)) - - last := self.pairAt(nb) *last = Pair{} - - self.setCapAndLen(self.cap(), nb) + // NOTICE: should be consistent with linkedPair.Len() + self.l-- +} + +func (self *Node) removePairAt(i int) { + p := (*linkedPairs)(self.p).At(i) + if p == nil { + return + } + *p = Pair{} + // NOTICE: should be consistent with linkedPair.Len() + self.l-- } func (self *Node) toGenericArray() ([]interface{}, error) { nb := self.len() - ret := make([]interface{}, nb) if nb == 0 { - return ret, nil + return []interface{}{}, nil } - + ret := make([]interface{}, 0, nb) + /* convert each item */ - var p = (*Node)(self.p) - x, err := p.Interface() - if err != nil { - return nil, err - } - ret[0] = x - - for i := 1; i < nb; i++ { - p = p.unsafe_next() - x, err := p.Interface() + it := self.values() + for v := it.next(); v != nil; v = it.next() { + vv, err := v.Interface() if err != nil { return nil, err } - ret[i] = x + ret = append(ret, vv) } /* all done */ @@ -1398,26 +1529,19 @@ func (self *Node) toGenericArray() ([]interface{}, error) { func (self *Node) toGenericArrayUseNumber() ([]interface{}, error) { nb := self.len() - ret := make([]interface{}, nb) if nb == 0 { - return ret, nil + return []interface{}{}, nil } + ret := make([]interface{}, 0, nb) /* convert each item */ - var p = (*Node)(self.p) - x, err := p.InterfaceUseNumber() - if err != nil { - return nil, err - } - ret[0] = x - - for i := 1; i < nb; i++ { - p = p.unsafe_next() - x, err := p.InterfaceUseNumber() + it := self.values() + for v := it.next(); v != nil; v = it.next() { + vv, err := v.InterfaceUseNumber() if err != nil { return nil, err } - ret[i] = x + ret = append(ret, vv) } /* all done */ @@ -1426,50 +1550,32 @@ func (self *Node) toGenericArrayUseNumber() ([]interface{}, error) { func (self *Node) toGenericArrayUseNode() ([]Node, error) { var nb = self.len() - var out = make([]Node, nb) if nb == 0 { - return out, nil + return []Node{}, nil } - var p = (*Node)(self.p) - out[0] = *p - if err := p.Check(); err != nil { - return nil, err - } - - for i := 1; i < nb; i++ { - p = p.unsafe_next() - if err := p.Check(); err != nil { - return nil, err - } - out[i] = *p - } + var s = (*linkedNodes)(self.p) + var out = make([]Node, nb) + s.ToSlice(out) return out, nil } func (self *Node) toGenericObject() (map[string]interface{}, error) { nb := self.len() - ret := make(map[string]interface{}, nb) if nb == 0 { - return ret, nil + return map[string]interface{}{}, nil } + ret := make(map[string]interface{}, nb) /* convert each 
item */
- var p = (*Pair)(self.p)
- x, err := p.Value.Interface()
- if err != nil {
- return nil, err
- }
- ret[p.Key] = x
-
- for i := 1; i < nb; i++ {
- p = p.unsafe_next()
- x, err := p.Value.Interface()
+ it := self.properties()
+ for v := it.next(); v != nil; v = it.next() {
+ vv, err := v.Value.Interface()
if err != nil {
return nil, err
}
- ret[p.Key] = x
+ ret[v.Key] = vv
}
/* all done */
@@ -1479,26 +1585,19 @@ func (self *Node) toGenericObjectUseNumber() (map[string]interface{}, error) {
nb := self.len()
- ret := make(map[string]interface{}, nb)
if nb == 0 {
- return ret, nil
+ return map[string]interface{}{}, nil
}
+ ret := make(map[string]interface{}, nb)
/* convert each item */
- var p = (*Pair)(self.p)
- x, err := p.Value.InterfaceUseNumber()
- if err != nil {
- return nil, err
- }
- ret[p.Key] = x
-
- for i := 1; i < nb; i++ {
- p = p.unsafe_next()
- x, err := p.Value.InterfaceUseNumber()
+ it := self.properties()
+ for v := it.next(); v != nil; v = it.next() {
+ vv, err := v.Value.InterfaceUseNumber()
if err != nil {
return nil, err
}
- ret[p.Key] = x
+ ret[v.Key] = vv
}
/* all done */
@@ -1507,24 +1606,13 @@ func (self *Node) toGenericObjectUseNode() (map[string]Node, error) {
var nb = self.len()
- var out = make(map[string]Node, nb)
if nb == 0 {
- return out, nil
+ return map[string]Node{}, nil
}
- var p = (*Pair)(self.p)
- out[p.Key] = p.Value
- if err := p.Value.Check(); err != nil {
- return nil, err
- }
-
- for i := 1; i < nb; i++ {
- p = p.unsafe_next()
- if err := p.Value.Check(); err != nil {
- return nil, err
- }
- out[p.Key] = p.Value
- }
+ var s = (*linkedPairs)(self.p)
+ var out = make(map[string]Node, nb)
+ s.ToMap(out)
/* all done */
return out, nil
@@ -1536,15 +1624,28 @@ var (
nullNode = Node{t: types.V_NULL}
trueNode = Node{t: types.V_TRUE}
falseNode = Node{t: types.V_FALSE}
-
- emptyArrayNode = Node{t: types.V_ARRAY}
- emptyObjectNode = Node{t: types.V_OBJECT}
)
// NewRaw creates a node of raw json.
// If the input json is invalid, NewRaw returns an error Node.
func NewRaw(json string) Node {
- parser := NewParser(json)
+ parser := NewParserObj(json)
+ start, err := parser.skip()
+ if err != 0 {
+ return *newError(err, err.Message())
+ }
+ it := switchRawType(parser.s[start])
+ if it == _V_NONE {
+ return Node{}
+ }
+ return newRawNode(parser.s[start:parser.p], it, false)
+}
+
+// NewRawConcurrentRead creates a node of raw json, which can be READ
+// (GetByPath/Get/Index/GetOrIndex/Int64/Bool/Float64/String/Number/Interface/Array/Map/Raw/MarshalJSON) concurrently.
+// If the input json is invalid, NewRawConcurrentRead returns an error Node.
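+//
+// A hedged usage sketch (not from upstream):
+//
+//	n := NewRawConcurrentRead(`{"a":1,"b":2}`)
+//	go func() { _, _ = n.Get("a").Int64() }()
+//	go func() { _, _ = n.Get("b").Int64() }() // concurrent reads only; writes are not covered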
+func NewRawConcurrentRead(json string) Node { + parser := NewParserObj(json) start, err := parser.skip() if err != 0 { return *newError(err, err.Message()) @@ -1553,7 +1654,7 @@ func NewRaw(json string) Node { if it == _V_NONE { return Node{} } - return newRawNode(parser.s[start:parser.p], it) + return newRawNode(parser.s[start:parser.p], it, true) } // NewAny creates a node of type V_ANY if any's type isn't Node or *Node, @@ -1567,7 +1668,6 @@ func NewAny(any interface{}) Node { default: return Node{ t: _V_ANY, - v: 0, p: unsafe.Pointer(&any), } } @@ -1578,14 +1678,13 @@ func NewBytes(src []byte) Node { if len(src) == 0 { panic("empty src bytes") } - out := encodeBase64(src) + out := rt.EncodeBase64(src) return NewString(out) } // NewNull creates a node of type V_NULL func NewNull() Node { return Node{ - v: 0, p: nil, t: types.V_NULL, } @@ -1600,7 +1699,6 @@ func NewBool(v bool) Node { t = types.V_TRUE } return Node{ - v: 0, p: nil, t: t, } @@ -1610,26 +1708,30 @@ func NewBool(v bool) Node { // v must be a decimal string complying with RFC8259 func NewNumber(v string) Node { return Node{ - v: int64(len(v) & _LEN_MASK), + l: uint(len(v)), p: rt.StrPtr(v), t: _V_NUMBER, } } -func toNumber(node *Node) json.Number { - return json.Number(rt.StrFrom(node.p, node.v)) +func (node *Node) toNumber() json.Number { + return json.Number(rt.StrFrom(node.p, int64(node.l))) } -func numberToFloat64(node *Node) (float64, error) { - ret,err := toNumber(node).Float64() +func (self *Node) toString() string { + return rt.StrFrom(self.p, int64(self.l)) +} + +func (node *Node) toFloat64() (float64, error) { + ret, err := node.toNumber().Float64() if err != nil { return 0, err } return ret, nil } -func numberToInt64(node *Node) (int64, error) { - ret,err := toNumber(node).Int64() +func (node *Node) toInt64() (int64, error) { + ret,err := node.toNumber().Int64() if err != nil { return 0, err } @@ -1640,7 +1742,7 @@ func newBytes(v []byte) Node { return Node{ t: types.V_STRING, p: mem2ptr(v), - v: int64(len(v) & _LEN_MASK), + l: uint(len(v)), } } @@ -1652,157 +1754,90 @@ func NewString(v string) Node { return Node{ t: types.V_STRING, p: rt.StrPtr(v), - v: int64(len(v) & _LEN_MASK), + l: uint(len(v)), } } // NewArray creates a node of type V_ARRAY, // using v as its underlying children func NewArray(v []Node) Node { + s := new(linkedNodes) + s.FromSlice(v) + return newArray(s) +} + +const _Threshold_Index = 16 + +func newArray(v *linkedNodes) Node { return Node{ t: types.V_ARRAY, - v: int64(len(v)&_LEN_MASK | cap(v)<<_CAP_BITS), - p: *(*unsafe.Pointer)(unsafe.Pointer(&v)), + l: uint(v.Len()), + p: unsafe.Pointer(v), } } -func (self *Node) setArray(v []Node) { +func (self *Node) setArray(v *linkedNodes) { self.t = types.V_ARRAY - self.setCapAndLen(cap(v), len(v)) - self.p = *(*unsafe.Pointer)(unsafe.Pointer(&v)) + self.l = uint(v.Len()) + self.p = unsafe.Pointer(v) } // NewObject creates a node of type V_OBJECT, // using v as its underlying children func NewObject(v []Pair) Node { - return Node{ - t: types.V_OBJECT, - v: int64(len(v)&_LEN_MASK | cap(v)<<_CAP_BITS), - p: *(*unsafe.Pointer)(unsafe.Pointer(&v)), - } -} - -func (self *Node) setObject(v []Pair) { - self.t = types.V_OBJECT - self.setCapAndLen(cap(v), len(v)) - self.p = *(*unsafe.Pointer)(unsafe.Pointer(&v)) -} - -type parseObjectStack struct { - parser Parser - v []Pair -} - -type parseArrayStack struct { - parser Parser - v []Node + s := new(linkedPairs) + s.FromSlice(v) + return newObject(s) } -func newLazyArray(p *Parser, v []Node) Node { - s := 
new(parseArrayStack) - s.parser = *p - s.v = v - return Node{ - t: _V_ARRAY_LAZY, - v: int64(len(v)&_LEN_MASK | cap(v)<<_CAP_BITS), - p: unsafe.Pointer(s), +func newObject(v *linkedPairs) Node { + if v.size > _Threshold_Index { + v.BuildIndex() } -} - -func (self *Node) setLazyArray(p *Parser, v []Node) { - s := new(parseArrayStack) - s.parser = *p - s.v = v - self.t = _V_ARRAY_LAZY - self.setCapAndLen(cap(v), len(v)) - self.p = (unsafe.Pointer)(s) -} - -func newLazyObject(p *Parser, v []Pair) Node { - s := new(parseObjectStack) - s.parser = *p - s.v = v return Node{ - t: _V_OBJECT_LAZY, - v: int64(len(v)&_LEN_MASK | cap(v)<<_CAP_BITS), - p: unsafe.Pointer(s), + t: types.V_OBJECT, + l: uint(v.Len()), + p: unsafe.Pointer(v), } } -func (self *Node) setLazyObject(p *Parser, v []Pair) { - s := new(parseObjectStack) - s.parser = *p - s.v = v - self.t = _V_OBJECT_LAZY - self.setCapAndLen(cap(v), len(v)) - self.p = (unsafe.Pointer)(s) -} - -func newRawNode(str string, typ types.ValueType) Node { - return Node{ - t: _V_RAW | typ, - p: rt.StrPtr(str), - v: int64(len(str) & _LEN_MASK), +func (self *Node) setObject(v *linkedPairs) { + if v.size > _Threshold_Index { + v.BuildIndex() } + self.t = types.V_OBJECT + self.l = uint(v.Len()) + self.p = unsafe.Pointer(v) } func (self *Node) parseRaw(full bool) { - raw := rt.StrFrom(self.p, self.v) - parser := NewParser(raw) + lock := self.lock() + defer self.unlock() + if !self.isRaw() { + return + } + raw := self.toString() + parser := NewParserObj(raw) + var e types.ParsingError if full { parser.noLazy = true - parser.skipValue = false + *self, e = parser.Parse() + } else if lock { + var n Node + parser.noLazy = true + parser.loadOnce = true + n, e = parser.Parse() + self.assign(n) + } else { + *self, e = parser.Parse() } - var e types.ParsingError - *self, e = parser.Parse() if e != 0 { *self = *newSyntaxError(parser.syntaxError(e)) } } -func newError(err types.ParsingError, msg string) *Node { - return &Node{ - t: V_ERROR, - v: int64(err), - p: unsafe.Pointer(&msg), - } -} - -var typeJumpTable = [256]types.ValueType{ - '"' : types.V_STRING, - '-' : _V_NUMBER, - '0' : _V_NUMBER, - '1' : _V_NUMBER, - '2' : _V_NUMBER, - '3' : _V_NUMBER, - '4' : _V_NUMBER, - '5' : _V_NUMBER, - '6' : _V_NUMBER, - '7' : _V_NUMBER, - '8' : _V_NUMBER, - '9' : _V_NUMBER, - '[' : types.V_ARRAY, - 'f' : types.V_FALSE, - 'n' : types.V_NULL, - 't' : types.V_TRUE, - '{' : types.V_OBJECT, -} - -func switchRawType(c byte) types.ValueType { - return typeJumpTable[c] +func (self *Node) assign(n Node) { + self.l = n.l + self.p = n.p + atomic.StoreInt64(&self.t, n.t) } - -func unwrapError(err error) *Node { - if se, ok := err.(*Node); ok { - return se - }else if sse, ok := err.(Node); ok { - return &sse - } else { - msg := err.Error() - return &Node{ - t: V_ERROR, - v: 0, - p: unsafe.Pointer(&msg), - } - } -} \ No newline at end of file diff --git a/vendor/github.com/bytedance/sonic/ast/parser.go b/vendor/github.com/bytedance/sonic/ast/parser.go index 0a8e7b068c9caa596417cca1d7815f0b69d206a1..30bd1f4519b4dd66ee0977529836fc1d91aa64b6 100644 --- a/vendor/github.com/bytedance/sonic/ast/parser.go +++ b/vendor/github.com/bytedance/sonic/ast/parser.go @@ -17,12 +17,18 @@ package ast import ( - `fmt` - `github.com/bytedance/sonic/internal/native/types` - `github.com/bytedance/sonic/internal/rt` + "fmt" + "sync" + "sync/atomic" + + "github.com/bytedance/sonic/internal/native/types" + "github.com/bytedance/sonic/internal/rt" ) -const _DEFAULT_NODE_CAP int = 16 +const ( + _DEFAULT_NODE_CAP int = 16 + 
_APPEND_GROW_SHIFT = 1 +) const ( _ERR_NOT_FOUND types.ParsingError = 33 @@ -30,7 +36,10 @@ const ( ) var ( + // ErrNotExist means both key and value doesn't exist ErrNotExist error = newError(_ERR_NOT_FOUND, "value not exists") + + // ErrUnsupportType means API on the node is unsupported ErrUnsupportType error = newError(_ERR_UNSUPPORT_TYPE, "unsupported type") ) @@ -38,7 +47,9 @@ type Parser struct { p int s string noLazy bool + loadOnce bool skipValue bool + dbuf *byte } /** Parser Private Methods **/ @@ -107,7 +118,11 @@ func (self *Parser) lspace(sp int) int { return sp } -func (self *Parser) decodeArray(ret []Node) (Node, types.ParsingError) { +func (self *Parser) backward() { + for ; self.p >= 0 && isSpace(self.s[self.p]); self.p-=1 {} +} + +func (self *Parser) decodeArray(ret *linkedNodes) (Node, types.ParsingError) { sp := self.p ns := len(self.s) @@ -119,7 +134,7 @@ func (self *Parser) decodeArray(ret []Node) (Node, types.ParsingError) { /* check for empty array */ if self.s[self.p] == ']' { self.p++ - return emptyArrayNode, 0 + return Node{t: types.V_ARRAY}, 0 } /* allocate array space and parse every element */ @@ -140,7 +155,7 @@ func (self *Parser) decodeArray(ret []Node) (Node, types.ParsingError) { if t == _V_NONE { return Node{}, types.ERR_INVALID_CHAR } - val = newRawNode(self.s[start:self.p], t) + val = newRawNode(self.s[start:self.p], t, false) }else{ /* decode the value */ if val, err = self.Parse(); err != 0 { @@ -149,7 +164,7 @@ func (self *Parser) decodeArray(ret []Node) (Node, types.ParsingError) { } /* add the value to result */ - ret = append(ret, val) + ret.Push(val) self.p = self.lspace(self.p) /* check for EOF */ @@ -160,17 +175,17 @@ func (self *Parser) decodeArray(ret []Node) (Node, types.ParsingError) { /* check for the next character */ switch self.s[self.p] { case ',' : self.p++ - case ']' : self.p++; return NewArray(ret), 0 - default: - if val.isLazy() { - return newLazyArray(self, ret), 0 - } - return Node{}, types.ERR_INVALID_CHAR + case ']' : self.p++; return newArray(ret), 0 + default: + // if val.isLazy() { + // return newLazyArray(self, ret), 0 + // } + return Node{}, types.ERR_INVALID_CHAR } } } -func (self *Parser) decodeObject(ret []Pair) (Node, types.ParsingError) { +func (self *Parser) decodeObject(ret *linkedPairs) (Node, types.ParsingError) { sp := self.p ns := len(self.s) @@ -182,7 +197,7 @@ func (self *Parser) decodeObject(ret []Pair) (Node, types.ParsingError) { /* check for empty object */ if self.s[self.p] == '}' { self.p++ - return emptyObjectNode, 0 + return Node{t: types.V_OBJECT}, 0 } /* decode each pair */ @@ -226,7 +241,7 @@ func (self *Parser) decodeObject(ret []Pair) (Node, types.ParsingError) { if t == _V_NONE { return Node{}, types.ERR_INVALID_CHAR } - val = newRawNode(self.s[start:self.p], t) + val = newRawNode(self.s[start:self.p], t, false) } else { /* decode the value */ if val, err = self.Parse(); err != 0 { @@ -235,7 +250,8 @@ func (self *Parser) decodeObject(ret []Pair) (Node, types.ParsingError) { } /* add the value to result */ - ret = append(ret, Pair{Key: key, Value: val}) + // FIXME: ret's address may change here, thus previous referred node in ret may be invalid !! 
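+ // (i.e. do not keep dereferencing a *Pair taken out of ret once another
+ // pair has been pushed)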
+ ret.Push(NewPair(key, val))
self.p = self.lspace(self.p)
/* check for EOF */
@@ -246,11 +262,11 @@
/* check for the next character */
switch self.s[self.p] {
case ',' : self.p++
- case '}' : self.p++; return NewObject(ret), 0
+ case '}' : self.p++; return newObject(ret), 0
default:
- if val.isLazy() {
- return newLazyObject(self, ret), 0
- }
+ // if val.isLazy() {
+ // return newLazyObject(self, ret), 0
+ // }
return Node{}, types.ERR_INVALID_CHAR
}
}
@@ -282,6 +298,10 @@ func (self *Parser) Pos() int {
return self.p
}
+
+// Parse returns an ast.Node representing the parser's JSON.
+// NOTICE: how lazily the JSON is parsed depends on the parser's options.
+// By default it only parses the first layer and first child for Object or Array.
func (self *Parser) Parse() (Node, types.ParsingError) {
switch val := self.decodeValue(); val.Vt {
case types.V_EOF : return Node{}, types.ERR_EOF
@@ -290,15 +310,49 @@
case types.V_FALSE : return falseNode, 0
case types.V_STRING : return self.decodeString(val.Iv, val.Ep)
case types.V_ARRAY:
+ s := self.p - 1;
+ if p := skipBlank(self.s, self.p); p >= self.p && self.s[p] == ']' {
+ self.p = p + 1
+ return Node{t: types.V_ARRAY}, 0
+ }
if self.noLazy {
- return self.decodeArray(make([]Node, 0, _DEFAULT_NODE_CAP))
+ if self.loadOnce {
+ self.noLazy = false
+ }
+ return self.decodeArray(new(linkedNodes))
+ }
+ // NOTICE: loadOnce always keeps raw json for object or array
+ if self.loadOnce {
+ self.p = s
+ s, e := self.skipFast()
+ if e != 0 {
+ return Node{}, e
+ }
+ return newRawNode(self.s[s:self.p], types.V_ARRAY, true), 0
}
- return newLazyArray(self, make([]Node, 0, _DEFAULT_NODE_CAP)), 0
+ return newLazyArray(self), 0
case types.V_OBJECT:
+ s := self.p - 1;
+ if p := skipBlank(self.s, self.p); p >= self.p && self.s[p] == '}' {
+ self.p = p + 1
+ return Node{t: types.V_OBJECT}, 0
+ }
+ // NOTICE: loadOnce always keeps raw json for object or array
if self.noLazy {
- return self.decodeObject(make([]Pair, 0, _DEFAULT_NODE_CAP))
+ if self.loadOnce {
+ self.noLazy = false
+ }
+ return self.decodeObject(new(linkedPairs))
}
- return newLazyObject(self, make([]Pair, 0, _DEFAULT_NODE_CAP)), 0
+ if self.loadOnce {
+ self.p = s
+ s, e := self.skipFast()
+ if e != 0 {
+ return Node{}, e
+ }
+ return newRawNode(self.s[s:self.p], types.V_OBJECT, true), 0
+ }
+ return newLazyObject(self), 0
case types.V_DOUBLE : return NewNumber(self.s[val.Ep:self.p]), 0
case types.V_INTEGER : return NewNumber(self.s[val.Ep:self.p]), 0
default : return Node{}, types.ParsingError(-val.Vt)
@@ -429,7 +483,7 @@ func (self *Node) skipNextNode() *Node {
}
parser, stack := self.getParserAndArrayStack()
- ret := stack.v
+ ret := &stack.v
sp := parser.p
ns := len(parser.s)
@@ -454,11 +508,12 @@
if t == _V_NONE {
return newSyntaxError(parser.syntaxError(types.ERR_INVALID_CHAR))
}
- val = newRawNode(parser.s[start:parser.p], t)
+ val = newRawNode(parser.s[start:parser.p], t, false)
}
/* add the value to result */
- ret = append(ret, val)
+ ret.Push(val)
+ self.l++
parser.p = parser.lspace(parser.p)
/* check for EOF */
@@ -470,12 +525,11 @@
switch parser.s[parser.p] {
case ',':
parser.p++
- self.setLazyArray(parser, ret)
- return &ret[len(ret)-1]
+ return ret.At(ret.Len()-1)
case ']':
parser.p++
self.setArray(ret)
- return &ret[len(ret)-1]
+ return ret.At(ret.Len()-1)
default:
return newSyntaxError(parser.syntaxError(types.ERR_INVALID_CHAR))
newSyntaxError(parser.syntaxError(types.ERR_INVALID_CHAR)) } @@ -487,13 +541,13 @@ func (self *Node) skipNextPair() (*Pair) { } parser, stack := self.getParserAndObjectStack() - ret := stack.v + ret := &stack.v sp := parser.p ns := len(parser.s) /* check for EOF */ if parser.p = parser.lspace(sp); parser.p >= ns { - return &Pair{"", *newSyntaxError(parser.syntaxError(types.ERR_EOF))} + return newErrorPair(parser.syntaxError(types.ERR_EOF)) } /* check for empty object */ @@ -510,7 +564,7 @@ func (self *Node) skipNextPair() (*Pair) { /* decode the key */ if njs = parser.decodeValue(); njs.Vt != types.V_STRING { - return &Pair{"", *newSyntaxError(parser.syntaxError(types.ERR_INVALID_CHAR))} + return newErrorPair(parser.syntaxError(types.ERR_INVALID_CHAR)) } /* extract the key */ @@ -520,47 +574,47 @@ func (self *Node) skipNextPair() (*Pair) { /* check for escape sequence */ if njs.Ep != -1 { if key, err = unquote(key); err != 0 { - return &Pair{key, *newSyntaxError(parser.syntaxError(err))} + return newErrorPair(parser.syntaxError(err)) } } /* expect a ':' delimiter */ if err = parser.delim(); err != 0 { - return &Pair{key, *newSyntaxError(parser.syntaxError(err))} + return newErrorPair(parser.syntaxError(err)) } /* skip the value */ if start, err := parser.skipFast(); err != 0 { - return &Pair{key, *newSyntaxError(parser.syntaxError(err))} + return newErrorPair(parser.syntaxError(err)) } else { t := switchRawType(parser.s[start]) if t == _V_NONE { - return &Pair{key, *newSyntaxError(parser.syntaxError(types.ERR_INVALID_CHAR))} + return newErrorPair(parser.syntaxError(types.ERR_INVALID_CHAR)) } - val = newRawNode(parser.s[start:parser.p], t) + val = newRawNode(parser.s[start:parser.p], t, false) } /* add the value to result */ - ret = append(ret, Pair{Key: key, Value: val}) + ret.Push(NewPair(key, val)) + self.l++ parser.p = parser.lspace(parser.p) /* check for EOF */ if parser.p >= ns { - return &Pair{key, *newSyntaxError(parser.syntaxError(types.ERR_EOF))} + return newErrorPair(parser.syntaxError(types.ERR_EOF)) } /* check for the next character */ switch parser.s[parser.p] { case ',': parser.p++ - self.setLazyObject(parser, ret) - return &ret[len(ret)-1] + return ret.At(ret.Len()-1) case '}': parser.p++ self.setObject(ret) - return &ret[len(ret)-1] + return ret.At(ret.Len()-1) default: - return &Pair{key, *newSyntaxError(parser.syntaxError(types.ERR_INVALID_CHAR))} + return newErrorPair(parser.syntaxError(types.ERR_INVALID_CHAR)) } } @@ -601,10 +655,30 @@ func LoadsUseNumber(src string) (int, interface{}, error) { } } +// NewParser returns pointer of new allocated parser func NewParser(src string) *Parser { return &Parser{s: src} } +// NewParser returns new allocated parser +func NewParserObj(src string) Parser { + return Parser{s: src} +} + +// decodeNumber controls if parser decodes the number values instead of skip them +// WARN: once you set decodeNumber(true), please set decodeNumber(false) before you drop the parser +// otherwise the memory CANNOT be reused +func (self *Parser) decodeNumber(decode bool) { + if !decode && self.dbuf != nil { + types.FreeDbuf(self.dbuf) + self.dbuf = nil + return + } + if decode && self.dbuf == nil { + self.dbuf = types.NewDbuf() + } +} + // ExportError converts types.ParsingError to std Error func (self *Parser) ExportError(err types.ParsingError) error { if err == _ERR_NOT_FOUND { @@ -615,4 +689,78 @@ func (self *Parser) ExportError(err types.ParsingError) error { Src : self.s, Code: err, }.Description()) -} \ No newline at end of file +} + +func 
backward(src string, i int) int { + for ; i>=0 && isSpace(src[i]); i-- {} + return i +} + + +func newRawNode(str string, typ types.ValueType, lock bool) Node { + ret := Node{ + t: typ | _V_RAW, + p: rt.StrPtr(str), + l: uint(len(str)), + } + if lock { + ret.m = new(sync.RWMutex) + } + return ret +} + +var typeJumpTable = [256]types.ValueType{ + '"' : types.V_STRING, + '-' : _V_NUMBER, + '0' : _V_NUMBER, + '1' : _V_NUMBER, + '2' : _V_NUMBER, + '3' : _V_NUMBER, + '4' : _V_NUMBER, + '5' : _V_NUMBER, + '6' : _V_NUMBER, + '7' : _V_NUMBER, + '8' : _V_NUMBER, + '9' : _V_NUMBER, + '[' : types.V_ARRAY, + 'f' : types.V_FALSE, + 'n' : types.V_NULL, + 't' : types.V_TRUE, + '{' : types.V_OBJECT, +} + +func switchRawType(c byte) types.ValueType { + return typeJumpTable[c] +} + +func (self *Node) loadt() types.ValueType { + return (types.ValueType)(atomic.LoadInt64(&self.t)) +} + +func (self *Node) lock() bool { + if m := self.m; m != nil { + m.Lock() + return true + } + return false +} + +func (self *Node) unlock() { + if m := self.m; m != nil { + m.Unlock() + } +} + +func (self *Node) rlock() bool { + if m := self.m; m != nil { + m.RLock() + return true + } + return false +} + +func (self *Node) runlock() { + if m := self.m; m != nil { + m.RUnlock() + } +} diff --git a/vendor/github.com/bytedance/sonic/ast/search.go b/vendor/github.com/bytedance/sonic/ast/search.go index bb6fceaa7cb3dbe0665c21deab6f490a9d42655a..9a5fb94203651bd2b2f3c1235a37ab1e61aa334f 100644 --- a/vendor/github.com/bytedance/sonic/ast/search.go +++ b/vendor/github.com/bytedance/sonic/ast/search.go @@ -16,8 +16,28 @@ package ast +import ( + `github.com/bytedance/sonic/internal/rt` + `github.com/bytedance/sonic/internal/native/types` +) + +// SearchOptions controls Searcher's behavior +type SearchOptions struct { + // ValidateJSON indicates the searcher to validate the entire JSON + ValidateJSON bool + + // CopyReturn indicates the searcher to copy the result JSON instead of refer from the input + // This can help to reduce memory usage if you cache the results + CopyReturn bool + + // ConcurrentRead indicates the searcher to return a concurrently-READ-safe node, + // including: GetByPath/Get/Index/GetOrIndex/Int64/Bool/Float64/String/Number/Interface/Array/Map/Raw/MarshalJSON + ConcurrentRead bool +} + type Searcher struct { parser Parser + SearchOptions } func NewSearcher(str string) *Searcher { @@ -26,5 +46,112 @@ func NewSearcher(str string) *Searcher { s: str, noLazy: false, }, + SearchOptions: SearchOptions{ + ValidateJSON: true, + }, + } +} + +// GetByPathCopy search in depth from top json and returns a **Copied** json node at the path location +func (self *Searcher) GetByPathCopy(path ...interface{}) (Node, error) { + self.CopyReturn = true + return self.getByPath(path...) +} + +// GetByPathNoCopy search in depth from top json and returns a **Referenced** json node at the path location +// +// WARN: this search directly refer partial json from top json, which has faster speed, +// may consumes more memory. +func (self *Searcher) GetByPath(path ...interface{}) (Node, error) { + return self.getByPath(path...) +} + +func (self *Searcher) getByPath(path ...interface{}) (Node, error) { + var err types.ParsingError + var start int + + self.parser.p = 0 + start, err = self.parser.getByPath(self.ValidateJSON, path...) 
+    if err != 0 {
+        // for compatibility with old version
+        if err == types.ERR_NOT_FOUND {
+            return Node{}, ErrNotExist
+        }
+        if err == types.ERR_UNSUPPORT_TYPE {
+            panic("path must be either int(>=0) or string")
+        }
+        return Node{}, self.parser.syntaxError(err)
+    }
+
+    t := switchRawType(self.parser.s[start])
+    if t == _V_NONE {
+        return Node{}, self.parser.ExportError(err)
+    }
+
+    // copy the string to reduce memory usage
+    var raw string
+    if self.CopyReturn {
+        raw = rt.Mem2Str([]byte(self.parser.s[start:self.parser.p]))
+    } else {
+        raw = self.parser.s[start:self.parser.p]
+    }
+    return newRawNode(raw, t, self.ConcurrentRead), nil
+}
+
+// _GetByPath searches a path and returns the location and type of the target
+func _GetByPath(src string, path ...interface{}) (start int, end int, typ int, err error) {
+    p := NewParserObj(src)
+    s, e := p.getByPath(false, path...)
+    if e != 0 {
+        // for compatibility with old version
+        if e == types.ERR_NOT_FOUND {
+            return -1, -1, 0, ErrNotExist
+        }
+        if e == types.ERR_UNSUPPORT_TYPE {
+            panic("path must be either int(>=0) or string")
+        }
+        return -1, -1, 0, p.syntaxError(e)
+    }
+
+    t := switchRawType(p.s[s])
+    if t == _V_NONE {
+        return -1, -1, 0, ErrNotExist
+    }
+    if t == _V_NUMBER {
+        p.p = 1 + backward(p.s, p.p-1)
+    }
+    return s, p.p, int(t), nil
+}
+
+// _ValidSyntax checks if a json has valid JSON syntax,
+// but does not validate the UTF-8 charset
+func _ValidSyntax(json string) bool {
+    p := NewParserObj(json)
+    _, e := p.skip()
+    if e != 0 {
+        return false
+    }
+    if skipBlank(p.s, p.p) != -int(types.ERR_EOF) {
+        return false
+    }
+    return true
+}
+
+// _SkipFast skips a json value using the fast-skip algorithm,
+// without strictly validating JSON syntax or the UTF-8 charset.
+func _SkipFast(src string, i int) (int, int, error) {
+    p := NewParserObj(src)
+    p.p = i
+    s, e := p.skipFast()
+    if e != 0 {
+        return -1, -1, p.ExportError(e)
+    }
+    t := switchRawType(p.s[s])
+    if t == _V_NONE {
+        return -1, -1, ErrNotExist
+    }
+    if t == _V_NUMBER {
+        p.p = 1 + backward(p.s, p.p-1)
+    }
+    return s, p.p, nil
+}
diff --git a/vendor/github.com/bytedance/sonic/ast/sort.go b/vendor/github.com/bytedance/sonic/ast/sort.go
deleted file mode 100644
index 0a9f1455987ee46a808250a5a0c2bbeff5faec37..0000000000000000000000000000000000000000
--- a/vendor/github.com/bytedance/sonic/ast/sort.go
+++ /dev/null
@@ -1,206 +0,0 @@
-/*
- * Copyright 2021 ByteDance Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package ast
-
-// Algorithm 3-way Radix Quicksort, d means the radix.
-// Reference: https://algs4.cs.princeton.edu/51radix/Quick3string.java.html
-func radixQsort(kvs PairSlice, d, maxDepth int) {
-    for len(kvs) > 11 {
-        // To avoid the worst case of quickSort (time: O(n^2)), use introsort here.
- // Reference: https://en.wikipedia.org/wiki/Introsort and - // https://github.com/golang/go/issues/467 - if maxDepth == 0 { - heapSort(kvs, 0, len(kvs)) - return - } - maxDepth-- - - p := pivot(kvs, d) - lt, i, gt := 0, 0, len(kvs) - for i < gt { - c := byteAt(kvs[i].Key, d) - if c < p { - swap(kvs, lt, i) - i++ - lt++ - } else if c > p { - gt-- - swap(kvs, i, gt) - } else { - i++ - } - } - - // kvs[0:lt] < v = kvs[lt:gt] < kvs[gt:len(kvs)] - // Native implemention: - // radixQsort(kvs[:lt], d, maxDepth) - // if p > -1 { - // radixQsort(kvs[lt:gt], d+1, maxDepth) - // } - // radixQsort(kvs[gt:], d, maxDepth) - // Optimize as follows: make recursive calls only for the smaller parts. - // Reference: https://www.geeksforgeeks.org/quicksort-tail-call-optimization-reducing-worst-case-space-log-n/ - if p == -1 { - if lt > len(kvs) - gt { - radixQsort(kvs[gt:], d, maxDepth) - kvs = kvs[:lt] - } else { - radixQsort(kvs[:lt], d, maxDepth) - kvs = kvs[gt:] - } - } else { - ml := maxThree(lt, gt-lt, len(kvs)-gt) - if ml == lt { - radixQsort(kvs[lt:gt], d+1, maxDepth) - radixQsort(kvs[gt:], d, maxDepth) - kvs = kvs[:lt] - } else if ml == gt-lt { - radixQsort(kvs[:lt], d, maxDepth) - radixQsort(kvs[gt:], d, maxDepth) - kvs = kvs[lt:gt] - d += 1 - } else { - radixQsort(kvs[:lt], d, maxDepth) - radixQsort(kvs[lt:gt], d+1, maxDepth) - kvs = kvs[gt:] - } - } - } - insertRadixSort(kvs, d) -} - -func insertRadixSort(kvs PairSlice, d int) { - for i := 1; i < len(kvs); i++ { - for j := i; j > 0 && lessFrom(kvs[j].Key, kvs[j-1].Key, d); j-- { - swap(kvs, j, j-1) - } - } -} - -func pivot(kvs PairSlice, d int) int { - m := len(kvs) >> 1 - if len(kvs) > 40 { - // Tukey's ``Ninther,'' median of three mediankvs of three. - t := len(kvs) / 8 - return medianThree( - medianThree(byteAt(kvs[0].Key, d), byteAt(kvs[t].Key, d), byteAt(kvs[2*t].Key, d)), - medianThree(byteAt(kvs[m].Key, d), byteAt(kvs[m-t].Key, d), byteAt(kvs[m+t].Key, d)), - medianThree(byteAt(kvs[len(kvs)-1].Key, d), - byteAt(kvs[len(kvs)-1-t].Key, d), - byteAt(kvs[len(kvs)-1-2*t].Key, d))) - } - return medianThree(byteAt(kvs[0].Key, d), byteAt(kvs[m].Key, d), byteAt(kvs[len(kvs)-1].Key, d)) -} - -func medianThree(i, j, k int) int { - if i > j { - i, j = j, i - } // i < j - if k < i { - return i - } - if k > j { - return j - } - return k -} - -func maxThree(i, j, k int) int { - max := i - if max < j { - max = j - } - if max < k { - max = k - } - return max -} - -// maxDepth returns a threshold at which quicksort should switch -// to heapsort. It returnkvs 2*ceil(lg(n+1)). -func maxDepth(n int) int { - var depth int - for i := n; i > 0; i >>= 1 { - depth++ - } - return depth * 2 -} - -// siftDown implements the heap property on kvs[lo:hi]. -// first is an offset into the array where the root of the heap lies. -func siftDown(kvs PairSlice, lo, hi, first int) { - root := lo - for { - child := 2*root + 1 - if child >= hi { - break - } - if child+1 < hi && kvs[first+child].Key < kvs[first+child+1].Key { - child++ - } - if kvs[first+root].Key >= kvs[first+child].Key { - return - } - swap(kvs, first+root, first+child) - root = child - } -} - -func heapSort(kvs PairSlice, a, b int) { - first := a - lo := 0 - hi := b - a - - // Build heap with the greatest element at top. - for i := (hi - 1) / 2; i >= 0; i-- { - siftDown(kvs, i, hi, first) - } - - // Pop elements, the largest first, into end of kvs. 
- for i := hi - 1; i >= 0; i-- { - swap(kvs, first, first+i) - siftDown(kvs, lo, i, first) - } -} - -// Note that Pair.Key is NOT pointed to Pair.m when map key is integer after swap -func swap(kvs PairSlice, a, b int) { - kvs[a].Key, kvs[b].Key = kvs[b].Key, kvs[a].Key - kvs[a].Value, kvs[b].Value = kvs[b].Value, kvs[a].Value -} - -// Compare two strings from the pos d. -func lessFrom(a, b string, d int) bool { - l := len(a) - if l > len(b) { - l = len(b) - } - for i := d; i < l; i++ { - if a[i] == b[i] { - continue - } - return a[i] < b[i] - } - return len(a) < len(b) -} - -func byteAt(b string, p int) int { - if p < len(b) { - return int(b[p]) - } - return -1 -} diff --git a/vendor/github.com/bytedance/sonic/ast/stubs_go115.go b/vendor/github.com/bytedance/sonic/ast/stubs.go similarity index 37% rename from vendor/github.com/bytedance/sonic/ast/stubs_go115.go rename to vendor/github.com/bytedance/sonic/ast/stubs.go index 37b9451f08d31e218a668c225632c831d7b48dab..53bf3b8aa0661ce9976af3d319fad2d52dd042da 100644 --- a/vendor/github.com/bytedance/sonic/ast/stubs_go115.go +++ b/vendor/github.com/bytedance/sonic/ast/stubs.go @@ -1,5 +1,3 @@ -// +build !go1.20 - /* * Copyright 2021 ByteDance Inc. * @@ -19,10 +17,10 @@ package ast import ( - `unsafe` - `unicode/utf8` + "unicode/utf8" + "unsafe" - `github.com/bytedance/sonic/internal/rt` + "github.com/bytedance/sonic/internal/rt" ) //go:noescape @@ -34,22 +32,111 @@ func memmove(to unsafe.Pointer, from unsafe.Pointer, n uintptr) //goland:noinspection GoUnusedParameter func unsafe_NewArray(typ *rt.GoType, n int) unsafe.Pointer -//go:linkname growslice runtime.growslice -//goland:noinspection GoUnusedParameter -func growslice(et *rt.GoType, old rt.GoSlice, cap int) rt.GoSlice - //go:nosplit func mem2ptr(s []byte) unsafe.Pointer { return (*rt.GoSlice)(unsafe.Pointer(&s)).Ptr } -var ( - //go:linkname safeSet encoding/json.safeSet - safeSet [utf8.RuneSelf]bool +var safeSet = [utf8.RuneSelf]bool{ + ' ': true, + '!': true, + '"': false, + '#': true, + '$': true, + '%': true, + '&': true, + '\'': true, + '(': true, + ')': true, + '*': true, + '+': true, + ',': true, + '-': true, + '.': true, + '/': true, + '0': true, + '1': true, + '2': true, + '3': true, + '4': true, + '5': true, + '6': true, + '7': true, + '8': true, + '9': true, + ':': true, + ';': true, + '<': true, + '=': true, + '>': true, + '?': true, + '@': true, + 'A': true, + 'B': true, + 'C': true, + 'D': true, + 'E': true, + 'F': true, + 'G': true, + 'H': true, + 'I': true, + 'J': true, + 'K': true, + 'L': true, + 'M': true, + 'N': true, + 'O': true, + 'P': true, + 'Q': true, + 'R': true, + 'S': true, + 'T': true, + 'U': true, + 'V': true, + 'W': true, + 'X': true, + 'Y': true, + 'Z': true, + '[': true, + '\\': false, + ']': true, + '^': true, + '_': true, + '`': true, + 'a': true, + 'b': true, + 'c': true, + 'd': true, + 'e': true, + 'f': true, + 'g': true, + 'h': true, + 'i': true, + 'j': true, + 'k': true, + 'l': true, + 'm': true, + 'n': true, + 'o': true, + 'p': true, + 'q': true, + 'r': true, + 's': true, + 't': true, + 'u': true, + 'v': true, + 'w': true, + 'x': true, + 'y': true, + 'z': true, + '{': true, + '|': true, + '}': true, + '~': true, + '\u007f': true, +} - //go:linkname hex encoding/json.hex - hex string -) +var hex = "0123456789abcdef" //go:linkname unquoteBytes encoding/json.unquoteBytes -func unquoteBytes(s []byte) (t []byte, ok bool) \ No newline at end of file +func unquoteBytes(s []byte) (t []byte, ok bool) diff --git a/vendor/github.com/bytedance/sonic/ast/visitor.go 
b/vendor/github.com/bytedance/sonic/ast/visitor.go new file mode 100644 index 0000000000000000000000000000000000000000..dc04785133b3c9bf63464a75706a22a0671b2e8e --- /dev/null +++ b/vendor/github.com/bytedance/sonic/ast/visitor.go @@ -0,0 +1,332 @@ +/* + * Copyright 2021 ByteDance Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package ast + +import ( + `encoding/json` + `errors` + + `github.com/bytedance/sonic/internal/native/types` +) + +// Visitor handles the callbacks during preorder traversal of a JSON AST. +// +// According to the JSON RFC8259, a JSON AST can be defined by +// the following rules without separator / whitespace tokens. +// +// JSON-AST = value +// value = false / null / true / object / array / number / string +// object = begin-object [ member *( member ) ] end-object +// member = string value +// array = begin-array [ value *( value ) ] end-array +// +type Visitor interface { + + // OnNull handles a JSON null value. + OnNull() error + + // OnBool handles a JSON true / false value. + OnBool(v bool) error + + // OnString handles a JSON string value. + OnString(v string) error + + // OnInt64 handles a JSON number value with int64 type. + OnInt64(v int64, n json.Number) error + + // OnFloat64 handles a JSON number value with float64 type. + OnFloat64(v float64, n json.Number) error + + // OnObjectBegin handles the beginning of a JSON object value with a + // suggested capacity that can be used to make your custom object container. + // + // After this point the visitor will receive a sequence of callbacks like + // [string, value, string, value, ......, ObjectEnd]. + // + // Note: + // 1. This is a recursive definition which means the value can + // also be a JSON object / array described by a sequence of callbacks. + // 2. The suggested capacity will be 0 if current object is empty. + // 3. Currently sonic use a fixed capacity for non-empty object (keep in + // sync with ast.Node) which might not be very suitable. This may be + // improved in future version. + OnObjectBegin(capacity int) error + + // OnObjectKey handles a JSON object key string in member. + OnObjectKey(key string) error + + // OnObjectEnd handles the ending of a JSON object value. + OnObjectEnd() error + + // OnArrayBegin handles the beginning of a JSON array value with a + // suggested capacity that can be used to make your custom array container. + // + // After this point the visitor will receive a sequence of callbacks like + // [value, value, value, ......, ArrayEnd]. + // + // Note: + // 1. This is a recursive definition which means the value can + // also be a JSON object / array described by a sequence of callbacks. + // 2. The suggested capacity will be 0 if current array is empty. + // 3. Currently sonic use a fixed capacity for non-empty array (keep in + // sync with ast.Node) which might not be very suitable. This may be + // improved in future version. + OnArrayBegin(capacity int) error + + // OnArrayEnd handles the ending of a JSON array value. 
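+    // For a concrete sense of the callback order (an illustrative example,
+    // not taken from the upstream file): visiting `{"a":[1,true]}` produces
+    //
+    //    OnObjectBegin(c) -> OnObjectKey("a") -> OnArrayBegin(c) ->
+    //    OnInt64(1, "1") -> OnBool(true) -> OnArrayEnd() -> OnObjectEnd()
+    //
+    // where c is the suggested capacity (_DEFAULT_NODE_CAP for non-empty
+    // containers, per the notes above).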
+    OnArrayEnd() error
+}
+
+// VisitorOptions contains all Visitor's options. The default value is an
+// empty VisitorOptions{}.
+type VisitorOptions struct {
+    // OnlyNumber tells the parser to return number values without
+    // conversion, in which case the first argument of OnInt64 / OnFloat64
+    // will always be zero.
+    OnlyNumber bool
+}
+
+var defaultVisitorOptions = &VisitorOptions{}
+
+// Preorder decodes the whole JSON string and invokes the visitor callbacks
+// for each AST node during preorder traversal. Any visitor method that
+// returns an error breaks the traversal, and that error is returned
+// directly. The opts argument can be reused after every call.
+func Preorder(str string, visitor Visitor, opts *VisitorOptions) error {
+    if opts == nil {
+        opts = defaultVisitorOptions
+    }
+    // process VisitorOptions first to guarantee that all options will be
+    // constant during decoding and make options more readable.
+    var (
+        optDecodeNumber = !opts.OnlyNumber
+    )
+
+    tv := &traverser{
+        parser: Parser{
+            s:         str,
+            noLazy:    true,
+            skipValue: false,
+        },
+        visitor: visitor,
+    }
+
+    if optDecodeNumber {
+        tv.parser.decodeNumber(true)
+    }
+
+    err := tv.decodeValue()
+
+    if optDecodeNumber {
+        tv.parser.decodeNumber(false)
+    }
+    return err
+}
+
+type traverser struct {
+    parser  Parser
+    visitor Visitor
+}
+
+// NOTE: keep in sync with (*Parser).Parse method.
+func (self *traverser) decodeValue() error {
+    switch val := self.parser.decodeValue(); val.Vt {
+    case types.V_EOF:
+        return types.ERR_EOF
+    case types.V_NULL:
+        return self.visitor.OnNull()
+    case types.V_TRUE:
+        return self.visitor.OnBool(true)
+    case types.V_FALSE:
+        return self.visitor.OnBool(false)
+    case types.V_STRING:
+        return self.decodeString(val.Iv, val.Ep)
+    case types.V_DOUBLE:
+        return self.visitor.OnFloat64(val.Dv,
+            json.Number(self.parser.s[val.Ep:self.parser.p]))
+    case types.V_INTEGER:
+        return self.visitor.OnInt64(val.Iv,
+            json.Number(self.parser.s[val.Ep:self.parser.p]))
+    case types.V_ARRAY:
+        return self.decodeArray()
+    case types.V_OBJECT:
+        return self.decodeObject()
+    default:
+        return types.ParsingError(-val.Vt)
+    }
+}
+
+// NOTE: keep in sync with (*Parser).decodeArray method.
+func (self *traverser) decodeArray() error {
+    sp := self.parser.p
+    ns := len(self.parser.s)
+
+    /* allocate array space and parse every element */
+    if err := self.visitor.OnArrayBegin(_DEFAULT_NODE_CAP); err != nil {
+        if err == VisitOPSkip {
+            // NOTICE: the user wants to skip the entire array
+            self.parser.p -= 1
+            if _, e := self.parser.skipFast(); e != 0 {
+                return e
+            }
+            return self.visitor.OnArrayEnd()
+        }
+        return err
+    }
+
+    /* check for EOF */
+    self.parser.p = self.parser.lspace(sp)
+    if self.parser.p >= ns {
+        return types.ERR_EOF
+    }
+
+    /* check for empty array */
+    if self.parser.s[self.parser.p] == ']' {
+        self.parser.p++
+        return self.visitor.OnArrayEnd()
+    }
+
+    for {
+        /* decode the value */
+        if err := self.decodeValue(); err != nil {
+            return err
+        }
+        self.parser.p = self.parser.lspace(self.parser.p)
+
+        /* check for EOF */
+        if self.parser.p >= ns {
+            return types.ERR_EOF
+        }
+
+        /* check for the next character */
+        switch self.parser.s[self.parser.p] {
+        case ',':
+            self.parser.p++
+        case ']':
+            self.parser.p++
+            return self.visitor.OnArrayEnd()
+        default:
+            return types.ERR_INVALID_CHAR
+        }
+    }
+}
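Since Preorder and VisitOPSkip (declared later in this file) are the whole surface a caller touches, a small end-to-end sketch may help; countingVisitor is an invented example type, and only the API shown in this file is assumed:

// countingVisitor counts JSON values, and skips every nested array
// wholesale by returning VisitOPSkip from OnArrayBegin: the traverser
// then fast-skips the array but still delivers OnArrayEnd.
type countingVisitor struct{ values int }

func (v *countingVisitor) OnNull() error                        { v.values++; return nil }
func (v *countingVisitor) OnBool(bool) error                    { v.values++; return nil }
func (v *countingVisitor) OnString(string) error                { v.values++; return nil }
func (v *countingVisitor) OnInt64(int64, json.Number) error     { v.values++; return nil }
func (v *countingVisitor) OnFloat64(float64, json.Number) error { v.values++; return nil }
func (v *countingVisitor) OnObjectBegin(int) error              { return nil }
func (v *countingVisitor) OnObjectKey(string) error             { return nil }
func (v *countingVisitor) OnObjectEnd() error                   { v.values++; return nil }
func (v *countingVisitor) OnArrayBegin(int) error               { return VisitOPSkip }
func (v *countingVisitor) OnArrayEnd() error                    { v.values++; return nil }

// usage:
//    v := &countingVisitor{}
//    err := Preorder(`{"a":[1,2],"b":3}`, v, nil)
//    // v.values == 3: the skipped array, the number 3, the enclosing object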
+
+// NOTE: keep in sync with (*Parser).decodeObject method.
+func (self *traverser) decodeObject() error {
+    sp := self.parser.p
+    ns := len(self.parser.s)
+
+    /* allocate object space and decode each pair */
+    if err := self.visitor.OnObjectBegin(_DEFAULT_NODE_CAP); err != nil {
+        if err == VisitOPSkip {
+            // NOTICE: the user wants to skip the entire object
+            self.parser.p -= 1
+            if _, e := self.parser.skipFast(); e != 0 {
+                return e
+            }
+            return self.visitor.OnObjectEnd()
+        }
+        return err
+    }
+
+    /* check for EOF */
+    self.parser.p = self.parser.lspace(sp)
+    if self.parser.p >= ns {
+        return types.ERR_EOF
+    }
+
+    /* check for empty object */
+    if self.parser.s[self.parser.p] == '}' {
+        self.parser.p++
+        return self.visitor.OnObjectEnd()
+    }
+
+    for {
+        var njs types.JsonState
+        var err types.ParsingError
+
+        /* decode the key */
+        if njs = self.parser.decodeValue(); njs.Vt != types.V_STRING {
+            return types.ERR_INVALID_CHAR
+        }
+
+        /* extract the key */
+        idx := self.parser.p - 1
+        key := self.parser.s[njs.Iv:idx]
+
+        /* check for escape sequence */
+        if njs.Ep != -1 {
+            if key, err = unquote(key); err != 0 {
+                return err
+            }
+        }
+
+        if err := self.visitor.OnObjectKey(key); err != nil {
+            return err
+        }
+
+        /* expect a ':' delimiter */
+        if err = self.parser.delim(); err != 0 {
+            return err
+        }
+
+        /* decode the value */
+        if err := self.decodeValue(); err != nil {
+            return err
+        }
+
+        self.parser.p = self.parser.lspace(self.parser.p)
+
+        /* check for EOF */
+        if self.parser.p >= ns {
+            return types.ERR_EOF
+        }
+
+        /* check for the next character */
+        switch self.parser.s[self.parser.p] {
+        case ',':
+            self.parser.p++
+        case '}':
+            self.parser.p++
+            return self.visitor.OnObjectEnd()
+        default:
+            return types.ERR_INVALID_CHAR
+        }
+    }
+}
+
+// NOTE: keep in sync with (*Parser).decodeString method.
+func (self *traverser) decodeString(iv int64, ep int) error {
+    p := self.parser.p - 1
+    s := self.parser.s[iv:p]
+
+    /* fast path: no escape sequence */
+    if ep == -1 {
+        return self.visitor.OnString(s)
+    }
+
+    /* unquote the string */
+    out, err := unquote(s)
+    if err != 0 {
+        return err
+    }
+    return self.visitor.OnString(out)
+}
+
+// If the visitor returns this error from `OnObjectBegin()` or `OnArrayBegin()`,
+// the traverser will skip the entire object or array.
+var VisitOPSkip = errors.New("")
diff --git a/vendor/github.com/bytedance/sonic/bench-arm.sh b/vendor/github.com/bytedance/sonic/bench-arm.sh
deleted file mode 100644
index b47d6278a0e42121dde5f20d0800b801b25af51d..0000000000000000000000000000000000000000
--- a/vendor/github.com/bytedance/sonic/bench-arm.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/usr/bin/env bash
-
-pwd=$(pwd)
-export SONIC_NO_ASYNC_GC=1
-
-cd $pwd/ast
-go test -benchmem -run=^$ -benchtime=1000000x -bench "^(BenchmarkGet.*|BenchmarkSet.*)$"
-
-go test -benchmem -run=^$ -benchtime=10000x -bench "^(BenchmarkParser_.*|BenchmarkEncode.*)$"
-
-go test -benchmem -run=^$ -benchtime=10000000x -bench "^(BenchmarkNodeGetByPath|BenchmarkStructGetByPath|BenchmarkNodeIndex|BenchmarkStructIndex|BenchmarkSliceIndex|BenchmarkMapIndex|BenchmarkNodeGet|BenchmarkSliceGet|BenchmarkMapGet|BenchmarkNodeSet|BenchmarkMapSet|BenchmarkNodeSetByIndex|BenchmarkSliceSetByIndex|BenchmarkStructSetByIndex|BenchmarkNodeUnset|BenchmarkMapUnset|BenchmarkNodUnsetByIndex|BenchmarkSliceUnsetByIndex|BenchmarkNodeAdd|BenchmarkSliceAdd|BenchmarkMapAdd)$"
-
-unset SONIC_NO_ASYNC_GC
-cd $pwd
\ No newline at end of file
diff --git a/vendor/github.com/bytedance/sonic/bench.py b/vendor/github.com/bytedance/sonic/bench.py
deleted file mode 100644
index
1d4c35739f3a13e2b5f44570ead8b1fc05c11da3..0000000000000000000000000000000000000000 --- a/vendor/github.com/bytedance/sonic/bench.py +++ /dev/null @@ -1,134 +0,0 @@ -#!/usr/bin/env python3 - -# Copyright 2022 ByteDance Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import tempfile -import os -import subprocess -import argparse - -gbench_prefix = "SONIC_NO_ASYNC_GC=1 go test -benchmem -run=none " - -def run(cmd): - print(cmd) - if os.system(cmd): - print ("Failed to run cmd: %s"%(cmd)) - exit(1) - -def run_s(cmd): - print (cmd) - try: - res = os.popen(cmd) - except subprocess.CalledProcessError as e: - if e.returncode: - print (e.output) - exit(1) - return res.read() - -def run_r(cmd): - print (cmd) - try: - cmds = cmd.split(' ') - data = subprocess.check_output(cmds, stderr=subprocess.STDOUT) - except subprocess.CalledProcessError as e: - if e.returncode: - print (e.output) - exit(1) - return data.decode("utf-8") - -def compare(args): - # detech current branch. - # result = run_r("git branch") - current_branch = run_s("git status | head -n1 | sed 's/On branch //'") - # for br in result.split('\n'): - # if br.startswith("* "): - # current_branch = br.lstrip('* ') - # break - - if not current_branch: - print ("Failed to detech current branch") - return None - - # get the current diff - (fd, diff) = tempfile.mkstemp() - run("git diff > %s"%diff) - - # early return if currrent is main branch. - print ("Current branch: %s"%(current_branch)) - if current_branch == "main": - print ("Cannot compare at the main branch.Please build a new branch") - return None - - # benchmark current branch - (fd, target) = tempfile.mkstemp(".target.txt") - run("%s %s ./... 2>&1 | tee %s" %(gbench_prefix, args, target)) - - # trying to switch to the latest main branch - run("git checkout -- .") - if current_branch != "main": - run("git checkout main") - run("git pull --allow-unrelated-histories origin main") - - # benchmark main branch - (fd, main) = tempfile.mkstemp(".main.txt") - run("%s %s ./... 2>&1 | tee %s" %(gbench_prefix, args, main)) - - # diff the result - # benchstat = "go get golang.org/x/perf/cmd/benchstat && go install golang.org/x/perf/cmd/benchstat" - run( "benchstat -sort=delta %s %s"%(main, target)) - run("git checkout -- .") - - # restore branch - if current_branch != "main": - run("git checkout %s"%(current_branch)) - run("patch -p1 < %s" % (diff)) - return target - -def main(): - argparser = argparse.ArgumentParser(description='Tools to test the performance. 
Example: ./bench.py -b Decoder_Generic_Sonic -c') - argparser.add_argument('-b', '--bench', dest='filter', required=False, - help='Specify the filter for golang benchmark') - argparser.add_argument('-c', '--compare', dest='compare', action='store_true', required=False, - help='Compare with the main benchmarking') - argparser.add_argument('-t', '--times', dest='times', required=False, - help='benchmark the times') - argparser.add_argument('-r', '--repeat_times', dest='count', required=False, - help='benchmark the count') - args = argparser.parse_args() - - if args.filter: - gbench_args = "-bench=%s"%(args.filter) - else: - gbench_args = "-bench=." - - if args.times: - gbench_args += " -benchtime=%s"%(args.times) - - if args.count: - gbench_args += " -count=%s"%(args.count) - else: - gbench_args += " -count=10" - - if args.compare: - target = compare(gbench_args) - else: - target = None - - if not target: - (fd, target) = tempfile.mkstemp(".target.txt") - run("%s %s ./... 2>&1 | tee %s" %(gbench_prefix, gbench_args, target)) - -if __name__ == "__main__": - main() diff --git a/vendor/github.com/bytedance/sonic/bench.sh b/vendor/github.com/bytedance/sonic/bench.sh deleted file mode 100644 index 701986b58a72b13bc6b00052f8adae94c0034127..0000000000000000000000000000000000000000 --- a/vendor/github.com/bytedance/sonic/bench.sh +++ /dev/null @@ -1,27 +0,0 @@ -#!/usr/bin/env bash - -pwd=$(pwd) -export SONIC_NO_ASYNC_GC=1 - -cd $pwd/encoder -go test -benchmem -run=^$ -benchtime=100000x -bench "^(BenchmarkEncoder_.*)$" - -cd $pwd/decoder -go test -benchmem -run=^$ -benchtime=100000x -bench "^(BenchmarkDecoder_.*)$" - -cd $pwd/ast -go test -benchmem -run=^$ -benchtime=1000000x -bench "^(BenchmarkGet.*|BenchmarkSet.*)$" - -go test -benchmem -run=^$ -benchtime=10000x -bench "^(BenchmarkParser_.*|BenchmarkEncode.*)$" - -go test -benchmem -run=^$ -benchtime=10000000x -bench "^(BenchmarkNodeGetByPath|BenchmarkStructGetByPath|BenchmarkNodeIndex|BenchmarkStructIndex|BenchmarkSliceIndex|BenchmarkMapIndex|BenchmarkNodeGet|BenchmarkSliceGet|BenchmarkMapGet|BenchmarkNodeSet|BenchmarkMapSet|BenchmarkNodeSetByIndex|BenchmarkSliceSetByIndex|BenchmarkStructSetByIndex|BenchmarkNodeUnset|BenchmarkMapUnset|BenchmarkNodUnsetByIndex|BenchmarkSliceUnsetByIndex|BenchmarkNodeAdd|BenchmarkSliceAdd|BenchmarkMapAdd)$" - -cd $pwd/external_jsonlib_test/benchmark_test -go test -benchmem -run=^$ -benchtime=100000x -bench "^(BenchmarkEncoder_.*|BenchmarkDecoder_.*)$" - -go test -benchmem -run=^$ -benchtime=1000000x -bench "^(BenchmarkGet.*|BenchmarkSet.*)$" - -go test -benchmem -run=^$ -benchtime=10000x -bench "^(BenchmarkParser_.*)$" - -unset SONIC_NO_ASYNC_GC -cd $pwd \ No newline at end of file diff --git a/vendor/github.com/bytedance/sonic/compat.go b/vendor/github.com/bytedance/sonic/compat.go index 015aa62bfb1f10730bd0c74685819ea949355d82..b32342a84aa5dbeebccda02987216822396c13df 100644 --- a/vendor/github.com/bytedance/sonic/compat.go +++ b/vendor/github.com/bytedance/sonic/compat.go @@ -1,4 +1,4 @@ -// +build !amd64 go1.21 +// +build !amd64,!arm64 go1.24 !go1.17 arm64,!go1.20 /* * Copyright 2021 ByteDance Inc. 
@@ -27,6 +27,8 @@ import ( `github.com/bytedance/sonic/option` ) +const apiKind = UseStdJSON + type frozenConfig struct { Config } diff --git a/vendor/github.com/bytedance/sonic/decoder/decoder_compat.go b/vendor/github.com/bytedance/sonic/decoder/decoder_compat.go index e6b9463d700f90ec3506a1b4ee8fbde0c3678bd8..81e1ae4e31dd71e35ecfdd719c9baf935b5a4b63 100644 --- a/vendor/github.com/bytedance/sonic/decoder/decoder_compat.go +++ b/vendor/github.com/bytedance/sonic/decoder/decoder_compat.go @@ -1,4 +1,4 @@ -// +build !amd64 go1.21 +// +build !amd64,!arm64 go1.24 !go1.17 arm64,!go1.20 /* * Copyright 2023 ByteDance Inc. @@ -14,28 +14,35 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. -*/ + */ package decoder import ( - `encoding/json` - `bytes` - `reflect` - `github.com/bytedance/sonic/internal/native/types` - `github.com/bytedance/sonic/option` - `io` + `bytes` + `encoding/json` + `io` + `reflect` + `unsafe` + + `github.com/bytedance/sonic/internal/native/types` + `github.com/bytedance/sonic/option` ) +func init() { + println("WARNING: sonic/decoder only supports (Go1.17~1.23 && CPU amd64) or (go1.20~1.23 && CPU arm64), but your environment is not suitable") +} + const ( - _F_use_int64 = iota - _F_use_number - _F_disable_urc - _F_disable_unknown - _F_copy_string - _F_validate_string - - _F_allow_control = 31 + _F_use_int64 = 0 + _F_disable_urc = 2 + _F_disable_unknown = 3 + _F_copy_string = 4 + + _F_use_number = types.B_USE_NUMBER + _F_validate_string = types.B_VALIDATE_STRING + _F_allow_control = types.B_ALLOW_CONTROL + _F_no_validate_json = types.B_NO_VALIDATE_JSON ) type Options uint64 @@ -47,6 +54,7 @@ const ( OptionDisableUnknown Options = 1 << _F_disable_unknown OptionCopyString Options = 1 << _F_copy_string OptionValidateString Options = 1 << _F_validate_string + OptionNoValidateJSON Options = 1 << _F_no_validate_json ) func (self *Decoder) SetOptions(opts Options) { @@ -106,10 +114,10 @@ func (self *Decoder) CheckTrailings() error { func (self *Decoder) Decode(val interface{}) error { r := bytes.NewBufferString(self.s) dec := json.NewDecoder(r) - if (self.f | uint64(OptionUseNumber)) != 0 { + if (self.f & uint64(OptionUseNumber)) != 0 { dec.UseNumber() } - if (self.f | uint64(OptionDisableUnknown)) != 0 { + if (self.f & uint64(OptionDisableUnknown)) != 0 { dec.DisallowUnknownFields() } return dec.Decode(val) @@ -163,34 +171,26 @@ func Pretouch(vt reflect.Type, opts ...option.CompileOption) error { return nil } -type StreamDecoder struct { - r io.Reader - buf []byte - scanp int - scanned int64 - err error - Decoder -} +type StreamDecoder = json.Decoder // NewStreamDecoder adapts to encoding/json.NewDecoder API. // // NewStreamDecoder returns a new decoder that reads from r. func NewStreamDecoder(r io.Reader) *StreamDecoder { - return &StreamDecoder{r : r} + return json.NewDecoder(r) } -// Decode decodes input stream into val with corresponding data. -// Redundantly bytes may be read and left in its buffer, and can be used at next call. -// Either io error from underlying io.Reader (except io.EOF) -// or syntax error from data will be recorded and stop subsequently decoding. 
-func (self *StreamDecoder) Decode(val interface{}) (err error) { - dec := json.NewDecoder(self.r) - if (self.f | uint64(OptionUseNumber)) != 0 { - dec.UseNumber() - } - if (self.f | uint64(OptionDisableUnknown)) != 0 { - dec.DisallowUnknownFields() - } - return dec.Decode(val) +// SyntaxError represents json syntax error +type SyntaxError json.SyntaxError + +// Description +func (s SyntaxError) Description() string { + return (*json.SyntaxError)(unsafe.Pointer(&s)).Error() +} +// Error +func (s SyntaxError) Error() string { + return (*json.SyntaxError)(unsafe.Pointer(&s)).Error() } +// MismatchTypeError represents dismatching between json and object +type MismatchTypeError json.UnmarshalTypeError \ No newline at end of file diff --git a/vendor/github.com/bytedance/sonic/decoder/decoder_amd64.go b/vendor/github.com/bytedance/sonic/decoder/decoder_native.go similarity index 59% rename from vendor/github.com/bytedance/sonic/decoder/decoder_amd64.go rename to vendor/github.com/bytedance/sonic/decoder/decoder_native.go index 2ef19957ccb3a94d9173e40258523532c02320ef..9317d57f6ce4e64134314575a9d7b24b040f3a2e 100644 --- a/vendor/github.com/bytedance/sonic/decoder/decoder_amd64.go +++ b/vendor/github.com/bytedance/sonic/decoder/decoder_native.go @@ -1,4 +1,6 @@ -// +build amd64,go1.15,!go1.21 +//go:build (amd64 && go1.17 && !go1.24) || (arm64 && go1.20 && !go1.24) +// +build amd64,go1.17,!go1.24 arm64,go1.20,!go1.24 + /* * Copyright 2023 ByteDance Inc. @@ -19,48 +21,51 @@ package decoder import ( - `github.com/bytedance/sonic/internal/decoder` + `github.com/bytedance/sonic/internal/decoder/api` ) // Decoder is the decoder context object -type Decoder = decoder.Decoder +type Decoder = api.Decoder + +// SyntaxError represents json syntax error +type SyntaxError = api.SyntaxError -type MismatchTypeError = decoder.MismatchTypeError +// MismatchTypeError represents dismatching between json and object +type MismatchTypeError = api.MismatchTypeError // Options for decode. -type Options = decoder.Options +type Options = api.Options const ( - OptionUseInt64 Options = decoder.OptionUseInt64 - OptionUseNumber Options = decoder.OptionUseNumber - OptionUseUnicodeErrors Options = decoder.OptionUseUnicodeErrors - OptionDisableUnknown Options = decoder.OptionDisableUnknown - OptionCopyString Options = decoder.OptionCopyString - OptionValidateString Options = decoder.OptionValidateString + OptionUseInt64 Options = api.OptionUseInt64 + OptionUseNumber Options = api.OptionUseNumber + OptionUseUnicodeErrors Options = api.OptionUseUnicodeErrors + OptionDisableUnknown Options = api.OptionDisableUnknown + OptionCopyString Options = api.OptionCopyString + OptionValidateString Options = api.OptionValidateString + OptionNoValidateJSON Options = api.OptionNoValidateJSON ) // StreamDecoder is the decoder context object for streaming input. -type StreamDecoder = decoder.StreamDecoder - -type SyntaxError = decoder.SyntaxError +type StreamDecoder = api.StreamDecoder var ( // NewDecoder creates a new decoder instance. - NewDecoder = decoder.NewDecoder + NewDecoder = api.NewDecoder // NewStreamDecoder adapts to encoding/json.NewDecoder API. // // NewStreamDecoder returns a new decoder that reads from r. - NewStreamDecoder = decoder.NewStreamDecoder + NewStreamDecoder = api.NewStreamDecoder // Pretouch compiles vt ahead-of-time to avoid JIT compilation on-the-fly, in // order to reduce the first-hit latency. 
// // Opts are the compile options, for example, "option.WithCompileRecursiveDepth" is // a compile option to set the depth of recursive compile for the nested struct type. - Pretouch = decoder.Pretouch + Pretouch = api.Pretouch // Skip skips only one json value, and returns first non-blank character position and its ending position if it is valid. // Otherwise, returns negative error code using start and invalid character position using end - Skip = decoder.Skip + Skip = api.Skip ) diff --git a/vendor/github.com/bytedance/sonic/encoder/encoder_compat.go b/vendor/github.com/bytedance/sonic/encoder/encoder_compat.go index afa80d561ffa663229ab589a77626e5558460019..254defa2052b2367a3476d670a3c9f95a5535d49 100644 --- a/vendor/github.com/bytedance/sonic/encoder/encoder_compat.go +++ b/vendor/github.com/bytedance/sonic/encoder/encoder_compat.go @@ -1,4 +1,4 @@ -// +build !amd64 go1.21 +// +build !amd64,!arm64 go1.24 !go1.17 arm64,!go1.20 /* * Copyright 2023 ByteDance Inc. @@ -27,6 +27,13 @@ import ( `github.com/bytedance/sonic/option` ) +func init() { + println("WARNING:(encoder) sonic only supports (Go1.17~1.23 && CPU amd64) or (G01.20~1.23 && CPU arm64) , but your environment is not suitable") +} + +// EnableFallback indicates if encoder use fallback +const EnableFallback = true + // Options is a set of encoding options. type Options uint64 @@ -37,6 +44,8 @@ const ( bitNoQuoteTextMarshaler bitNoNullSliceOrMap bitValidateString + bitNoValidateJSONMarshaler + bitNoEncoderNewline // used for recursive compile bitPointerValue = 63 @@ -68,6 +77,13 @@ const ( // ValidateString indicates that encoder should validate the input string // before encoding it into JSON. ValidateString Options = 1 << bitValidateString + + // NoValidateJSONMarshaler indicates that the encoder should not validate the output string + // after encoding the JSONMarshaler to JSON. + NoValidateJSONMarshaler Options = 1 << bitNoValidateJSONMarshaler + + // NoEncoderNewline indicates that the encoder should not add a newline after every message + NoEncoderNewline Options = 1 << bitNoEncoderNewline // CompatibleWithStd is used to be compatible with std encoder. CompatibleWithStd Options = SortMapKeys | EscapeHTML | CompactMarshaler @@ -112,6 +128,24 @@ func (self *Encoder) SetValidateString(f bool) { } } +// SetNoValidateJSONMarshaler specifies if option NoValidateJSONMarshaler opens +func (self *Encoder) SetNoValidateJSONMarshaler(f bool) { + if f { + self.Opts |= NoValidateJSONMarshaler + } else { + self.Opts &= ^NoValidateJSONMarshaler + } +} + +// SetNoEncoderNewline specifies if option NoEncoderNewline opens +func (self *Encoder) SetNoEncoderNewline(f bool) { + if f { + self.Opts |= NoEncoderNewline + } else { + self.Opts &= ^NoEncoderNewline + } +} + // SetCompactMarshaler specifies if option CompactMarshaler opens func (self *Encoder) SetCompactMarshaler(f bool) { if f { @@ -157,15 +191,19 @@ func Encode(val interface{}, opts Options) ([]byte, error) { // EncodeInto is like Encode but uses a user-supplied buffer instead of allocating // a new one. 
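// A hedged usage sketch (hypothetical caller: items and w are assumed to
// exist) showing why the buffer is passed by pointer, namely so its backing
// array can be reused across encodes:
//
//    buf := make([]byte, 0, 1024)
//    for _, v := range items {
//        buf = buf[:0] // keep capacity, drop contents
//        if err := EncodeInto(&buf, v, SortMapKeys|NoEncoderNewline); err != nil {
//            return err
//        }
//        w.Write(buf)
//    }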
func EncodeInto(buf *[]byte, val interface{}, opts Options) error { - if buf == nil { - panic("user-supplied buffer buf is nil") - } - w := bytes.NewBuffer(*buf) - enc := json.NewEncoder(w) - enc.SetEscapeHTML((opts & EscapeHTML) != 0) - err := enc.Encode(val) - *buf = w.Bytes() - return err + if buf == nil { + panic("user-supplied buffer buf is nil") + } + w := bytes.NewBuffer(*buf) + enc := json.NewEncoder(w) + enc.SetEscapeHTML((opts & EscapeHTML) != 0) + err := enc.Encode(val) + *buf = w.Bytes() + l := len(*buf) + if l > 0 && (opts & NoEncoderNewline != 0) && (*buf)[l-1] == '\n' { + *buf = (*buf)[:l-1] + } + return err } // HTMLEscape appends to dst the JSON-encoded src with <, >, &, U+2028 and U+2029 @@ -212,23 +250,12 @@ func Valid(data []byte) (ok bool, start int) { } // StreamEncoder uses io.Writer as -type StreamEncoder struct { - w io.Writer - Encoder -} +type StreamEncoder = json.Encoder // NewStreamEncoder adapts to encoding/json.NewDecoder API. // // NewStreamEncoder returns a new encoder that write to w. func NewStreamEncoder(w io.Writer) *StreamEncoder { - return &StreamEncoder{w: w} + return json.NewEncoder(w) } -// Encode encodes interface{} as JSON to io.Writer -func (enc *StreamEncoder) Encode(val interface{}) (err error) { - jenc := json.NewEncoder(enc.w) - jenc.SetEscapeHTML((enc.Opts & EscapeHTML) != 0) - jenc.SetIndent(enc.prefix, enc.indent) - err = jenc.Encode(val) - return err -} diff --git a/vendor/github.com/bytedance/sonic/encoder/encoder_amd64.go b/vendor/github.com/bytedance/sonic/encoder/encoder_native.go similarity index 86% rename from vendor/github.com/bytedance/sonic/encoder/encoder_amd64.go rename to vendor/github.com/bytedance/sonic/encoder/encoder_native.go index fa107c73fbe13106071249bfdf5cbc1e71a53a01..b300ebf085c04db96a315a7dfcec5845c9dd5c12 100644 --- a/vendor/github.com/bytedance/sonic/encoder/encoder_amd64.go +++ b/vendor/github.com/bytedance/sonic/encoder/encoder_native.go @@ -1,4 +1,4 @@ -// +build amd64,go1.15,!go1.21 +// +build amd64,go1.17,!go1.24 arm64,go1.20,!go1.24 /* * Copyright 2023 ByteDance Inc. @@ -22,6 +22,8 @@ import ( `github.com/bytedance/sonic/internal/encoder` ) +// EnableFallback indicates if encoder use fallback +const EnableFallback = false // Encoder represents a specific set of encoder configurations. type Encoder = encoder.Encoder @@ -59,8 +61,18 @@ const ( // before encoding it into JSON. ValidateString Options = encoder.ValidateString + // NoValidateJSONMarshaler indicates that the encoder should not validate the output string + // after encoding the JSONMarshaler to JSON. + NoValidateJSONMarshaler Options = encoder.NoValidateJSONMarshaler + + // NoEncoderNewline indicates that the encoder should not add a newline after every message + NoEncoderNewline Options = encoder.NoEncoderNewline + // CompatibleWithStd is used to be compatible with std encoder. CompatibleWithStd Options = encoder.CompatibleWithStd + + // Encode Infinity or Nan float into `null`, instead of returning an error. + EncodeNullForInfOrNan Options = encoder.EncodeNullForInfOrNan ) @@ -105,4 +117,4 @@ var ( // // NewStreamEncoder returns a new encoder that write to w. 
NewStreamEncoder = encoder.NewStreamEncoder -) \ No newline at end of file +) diff --git a/vendor/github.com/bytedance/sonic/go.work b/vendor/github.com/bytedance/sonic/go.work index e21d6f872d1e2f2c4bdf21738bfeff745fbe4dab..8d2af51b91ef61f8e28774aedb177a1207ff3caf 100644 --- a/vendor/github.com/bytedance/sonic/go.work +++ b/vendor/github.com/bytedance/sonic/go.work @@ -2,7 +2,8 @@ go 1.18 use ( . - ./generic_test - ./fuzz ./external_jsonlib_test + ./fuzz + ./generic_test + ./loader ) diff --git a/vendor/github.com/bytedance/sonic/go.work.sum b/vendor/github.com/bytedance/sonic/go.work.sum new file mode 100644 index 0000000000000000000000000000000000000000..d59625879f755ee73c2af5b02846acb65821c6a0 --- /dev/null +++ b/vendor/github.com/bytedance/sonic/go.work.sum @@ -0,0 +1 @@ +github.com/bytedance/sonic/loader v0.2.0/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU= diff --git a/vendor/github.com/bytedance/sonic/internal/base64/b64_amd64.go b/vendor/github.com/bytedance/sonic/internal/base64/b64_amd64.go new file mode 100644 index 0000000000000000000000000000000000000000..43db80d8a029f59a6b4a503b388a7844f4f6632f --- /dev/null +++ b/vendor/github.com/bytedance/sonic/internal/base64/b64_amd64.go @@ -0,0 +1,46 @@ +// +build amd64,go1.17,!go1.24 + +/** + * Copyright 2023 ByteDance Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package base64 + +import ( + "github.com/cloudwego/base64x" +) + +func DecodeBase64(src string) ([]byte, error) { + return base64x.StdEncoding.DecodeString(src) +} + +func EncodeBase64(buf []byte, src []byte) []byte { + if len(src) == 0 { + return append(buf, '"', '"') + } + buf = append(buf, '"') + need := base64x.StdEncoding.EncodedLen(len(src)) + if cap(buf) - len(buf) < need { + tmp := make([]byte, len(buf), len(buf) + need*2) + copy(tmp, buf) + buf = tmp + } + base64x.StdEncoding.Encode(buf[len(buf):cap(buf)], src) + buf = buf[:len(buf) + need] + buf = append(buf, '"') + return buf +} + + \ No newline at end of file diff --git a/vendor/github.com/bytedance/sonic/internal/base64/b64_compat.go b/vendor/github.com/bytedance/sonic/internal/base64/b64_compat.go new file mode 100644 index 0000000000000000000000000000000000000000..6688faa2033327991f31b2ee0a1900dd46d6af38 --- /dev/null +++ b/vendor/github.com/bytedance/sonic/internal/base64/b64_compat.go @@ -0,0 +1,44 @@ +// +build !amd64 !go1.17 go1.24 + +/* + * Copyright 2022 ByteDance Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package base64 + +import ( + "encoding/base64" +) + +func EncodeBase64(buf []byte, src []byte) []byte { + if len(src) == 0 { + return append(buf, '"', '"') + } + buf = append(buf, '"') + need := base64.StdEncoding.EncodedLen(len(src)) + if cap(buf) - len(buf) < need { + tmp := make([]byte, len(buf), len(buf) + need*2) + copy(tmp, buf) + buf = tmp + } + base64.StdEncoding.Encode(buf[len(buf):cap(buf)], src) + buf = buf[:len(buf) + need] + buf = append(buf, '"') + return buf +} + +func DecodeBase64(src string) ([]byte, error) { + return base64.StdEncoding.DecodeString(src) +} diff --git a/vendor/github.com/bytedance/sonic/internal/cpu/features.go b/vendor/github.com/bytedance/sonic/internal/cpu/features.go index f9ee3b8f32e7f3d5ebedc8820ed19a52821e3fc5..fd4dbda3c16d88c5e6a84d5f2d2d4db60d31894e 100644 --- a/vendor/github.com/bytedance/sonic/internal/cpu/features.go +++ b/vendor/github.com/bytedance/sonic/internal/cpu/features.go @@ -24,7 +24,6 @@ import ( ) var ( - HasAVX = cpuid.CPU.Has(cpuid.AVX) HasAVX2 = cpuid.CPU.Has(cpuid.AVX2) HasSSE = cpuid.CPU.Has(cpuid.SSE) ) @@ -33,7 +32,8 @@ func init() { switch v := os.Getenv("SONIC_MODE"); v { case "" : break case "auto" : break - case "noavx" : HasAVX = false; fallthrough + case "noavx" : HasAVX2 = false + // will also disable avx, act as `noavx`, we remain it to make sure forward compatibility case "noavx2" : HasAVX2 = false default : panic(fmt.Sprintf("invalid mode: '%s', should be one of 'auto', 'noavx', 'noavx2'", v)) } diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/decoder.go b/vendor/github.com/bytedance/sonic/internal/decoder/api/decoder.go similarity index 58% rename from vendor/github.com/bytedance/sonic/internal/decoder/decoder.go rename to vendor/github.com/bytedance/sonic/internal/decoder/api/decoder.go index 19ad719659f754c172538a771e4135c7ba9ba202..0dc01998f1bb437842685d90ac72ffd7f7999019 100644 --- a/vendor/github.com/bytedance/sonic/internal/decoder/decoder.go +++ b/vendor/github.com/bytedance/sonic/internal/decoder/api/decoder.go @@ -14,51 +14,52 @@ * limitations under the License. 
*/ -package decoder +package api import ( - `unsafe` - `encoding/json` `reflect` - `runtime` `github.com/bytedance/sonic/internal/native` `github.com/bytedance/sonic/internal/native/types` + `github.com/bytedance/sonic/internal/decoder/consts` + `github.com/bytedance/sonic/internal/decoder/errors` `github.com/bytedance/sonic/internal/rt` `github.com/bytedance/sonic/option` - `github.com/bytedance/sonic/utf8` ) const ( - _F_use_int64 = iota - _F_use_number - _F_disable_urc - _F_disable_unknown - _F_copy_string - _F_validate_string - - _F_allow_control = 31 + _F_allow_control = consts.F_allow_control + _F_copy_string = consts.F_copy_string + _F_disable_unknown = consts.F_disable_unknown + _F_disable_urc = consts.F_disable_urc + _F_use_int64 = consts.F_use_int64 + _F_use_number = consts.F_use_number + _F_validate_string = consts.F_validate_string + + _MaxStack = consts.MaxStack + + OptionUseInt64 = consts.OptionUseInt64 + OptionUseNumber = consts.OptionUseNumber + OptionUseUnicodeErrors = consts.OptionUseUnicodeErrors + OptionDisableUnknown = consts.OptionDisableUnknown + OptionCopyString = consts.OptionCopyString + OptionValidateString = consts.OptionValidateString + OptionNoValidateJSON = consts.OptionNoValidateJSON ) -type Options uint64 - -const ( - OptionUseInt64 Options = 1 << _F_use_int64 - OptionUseNumber Options = 1 << _F_use_number - OptionUseUnicodeErrors Options = 1 << _F_disable_urc - OptionDisableUnknown Options = 1 << _F_disable_unknown - OptionCopyString Options = 1 << _F_copy_string - OptionValidateString Options = 1 << _F_validate_string +type ( + Options = consts.Options + MismatchTypeError = errors.MismatchTypeError + SyntaxError = errors.SyntaxError ) func (self *Decoder) SetOptions(opts Options) { - if (opts & OptionUseNumber != 0) && (opts & OptionUseInt64 != 0) { + if (opts & consts.OptionUseNumber != 0) && (opts & consts.OptionUseInt64 != 0) { panic("can't set OptionUseInt64 and OptionUseNumber both!") } self.f = uint64(opts) } - // Decoder is the decoder context object type Decoder struct { i int @@ -109,44 +110,7 @@ func (self *Decoder) CheckTrailings() error { // Decode parses the JSON-encoded data from current position and stores the result // in the value pointed to by val. 
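//
// An illustrative call (mirroring encoding/json semantics; NewDecoder is
// the constructor this package exports, as aliased later in this diff):
//
//    var m map[string]interface{}
//    d := NewDecoder(`{"k": 1}  `)
//    if err := d.Decode(&m); err != nil {
//        // handle malformed input
//    }
//    if err := d.CheckTrailings(); err != nil {
//        // unreachable here: only blanks follow the first value
//    }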
func (self *Decoder) Decode(val interface{}) error { - /* validate json if needed */ - if (self.f & (1 << _F_validate_string)) != 0 && !utf8.ValidateString(self.s){ - dbuf := utf8.CorrectWith(nil, rt.Str2Mem(self.s), "\ufffd") - self.s = rt.Mem2Str(dbuf) - } - - vv := rt.UnpackEface(val) - vp := vv.Value - - /* check for nil type */ - if vv.Type == nil { - return &json.InvalidUnmarshalError{} - } - - /* must be a non-nil pointer */ - if vp == nil || vv.Type.Kind() != reflect.Ptr { - return &json.InvalidUnmarshalError{Type: vv.Type.Pack()} - } - - etp := rt.PtrElem(vv.Type) - - /* check the defined pointer type for issue 379 */ - if vv.Type.IsNamed() { - newp := vp - etp = vv.Type - vp = unsafe.Pointer(&newp) - } - - /* create a new stack, and call the decoder */ - sb := newStack() - nb, err := decodeTypedPointer(self.s, self.i, etp, vp, sb, self.f) - /* return the stack back */ - self.i = nb - freeStack(sb) - - /* avoid GC ahead */ - runtime.KeepAlive(vv) - return err + return decodeImpl(&self.s, &self.i, self.f, val) } // UseInt64 indicates the Decoder to unmarshal an integer into an interface{} as an @@ -194,53 +158,7 @@ func (self *Decoder) ValidateString() { // Opts are the compile options, for example, "option.WithCompileRecursiveDepth" is // a compile option to set the depth of recursive compile for the nested struct type. func Pretouch(vt reflect.Type, opts ...option.CompileOption) error { - cfg := option.DefaultCompileOptions() - for _, opt := range opts { - opt(&cfg) - } - return pretouchRec(map[reflect.Type]bool{vt:true}, cfg) -} - -func pretouchType(_vt reflect.Type, opts option.CompileOptions) (map[reflect.Type]bool, error) { - /* compile function */ - compiler := newCompiler().apply(opts) - decoder := func(vt *rt.GoType, _ ...interface{}) (interface{}, error) { - if pp, err := compiler.compile(_vt); err != nil { - return nil, err - } else { - as := newAssembler(pp) - as.name = _vt.String() - return as.Load(), nil - } - } - - /* find or compile */ - vt := rt.UnpackType(_vt) - if val := programCache.Get(vt); val != nil { - return nil, nil - } else if _, err := programCache.Compute(vt, decoder); err == nil { - return compiler.rec, nil - } else { - return nil, err - } -} - -func pretouchRec(vtm map[reflect.Type]bool, opts option.CompileOptions) error { - if opts.RecursiveDepth < 0 || len(vtm) == 0 { - return nil - } - next := make(map[reflect.Type]bool) - for vt := range(vtm) { - sub, err := pretouchType(vt, opts) - if err != nil { - return err - } - for svt := range(sub) { - next[svt] = true - } - } - opts.RecursiveDepth -= 1 - return pretouchRec(next, opts) + return pretouchImpl(vt, opts...) } // Skip skips only one json value, and returns first non-blank character position and its ending position if it is valid. @@ -252,4 +170,4 @@ func Skip(data []byte) (start int, end int) { ret := native.SkipOne(&s, &p, m, uint64(0)) types.FreeStateMachine(m) return ret, p -} \ No newline at end of file +} diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/api/decoder_amd64.go b/vendor/github.com/bytedance/sonic/internal/decoder/api/decoder_amd64.go new file mode 100644 index 0000000000000000000000000000000000000000..4e1c3f42c0df0869f31011c7682a41afffb2cd53 --- /dev/null +++ b/vendor/github.com/bytedance/sonic/internal/decoder/api/decoder_amd64.go @@ -0,0 +1,38 @@ +//go:build go1.17 && !go1.24 +// +build go1.17,!go1.24 + +/* + * Copyright 2021 ByteDance Inc. 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package api
+
+import (
+    "github.com/bytedance/sonic/internal/envs"
+    "github.com/bytedance/sonic/internal/decoder/jitdec"
+    "github.com/bytedance/sonic/internal/decoder/optdec"
+)
+
+var (
+    pretouchImpl = jitdec.Pretouch
+    decodeImpl   = jitdec.Decode
+)
+
+func init() {
+    if envs.UseOptDec {
+        pretouchImpl = optdec.Pretouch
+        decodeImpl = optdec.Decode
+    }
+}
diff --git a/vendor/github.com/bytedance/sonic/loader/loader_go115.go b/vendor/github.com/bytedance/sonic/internal/decoder/api/decoder_arm64.go
similarity index 62%
rename from vendor/github.com/bytedance/sonic/loader/loader_go115.go
rename to vendor/github.com/bytedance/sonic/internal/decoder/api/decoder_arm64.go
index a1d4d78923271aa3154c983182f599837b395af6..65a9478b45d9210fd6460b5e4f134986edfaf34c 100644
--- a/vendor/github.com/bytedance/sonic/loader/loader_go115.go
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/api/decoder_arm64.go
@@ -1,5 +1,4 @@
-//go:build go1.15 && !go1.16
-// +build go1.15,!go1.16
+// +build go1.17,!go1.24
 
 /*
  * Copyright 2021 ByteDance Inc.
@@ -17,12 +16,23 @@
  * limitations under the License.
  */
 
-package loader
+package api
 
 import (
-    `github.com/bytedance/sonic/internal/loader`
+    `github.com/bytedance/sonic/internal/decoder/optdec`
+    `github.com/bytedance/sonic/internal/envs`
 )
 
-func (self Loader) LoadOne(text []byte, funcName string, frameSize int, argSize int, argStackmap []bool, localStackmap []bool) Function {
-    return Function(loader.Loader(text).Load(funcName, frameSize, argSize, argStackmap, localStackmap))
-}
\ No newline at end of file
+var (
+    pretouchImpl = optdec.Pretouch
+    decodeImpl   = optdec.Decode
+)
+
+func init() {
+    // when on aarch64, enable all optimizations
+    envs.EnableOptDec()
+    envs.EnableFastMap()
+}
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/stream.go b/vendor/github.com/bytedance/sonic/internal/decoder/api/stream.go
similarity index 58%
rename from vendor/github.com/bytedance/sonic/internal/decoder/stream.go
rename to vendor/github.com/bytedance/sonic/internal/decoder/api/stream.go
index e1e0f73b96560dfc3055c5efdb037093b6f4eb70..8a8102dd556ad8226a0c019ac29e71a58fb2f04c 100644
--- a/vendor/github.com/bytedance/sonic/internal/decoder/stream.go
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/api/stream.go
@@ -14,19 +14,21 @@
  * limitations under the License.
  */
 
-package decoder
+package api
 
 import (
    `bytes`
    `io`
    `sync`
 
-    `github.com/bytedance/sonic/option`
+    `github.com/bytedance/sonic/internal/native`
    `github.com/bytedance/sonic/internal/native/types`
+    `github.com/bytedance/sonic/internal/rt`
+    `github.com/bytedance/sonic/option`
 )
 
 var (
-    minLeftBufferShift     uint = 1
+    minLeftBufferShift uint = 1
 )
 
 // StreamDecoder is the decoder context object for streaming input.
@@ -45,6 +47,12 @@ var bufPool = sync.Pool{
    },
 }
 
+func freeBytes(buf []byte) {
+    if rt.CanSizeResue(cap(buf)) {
+        bufPool.Put(buf[:0])
+    }
+}
+
 // NewStreamDecoder adapts to encoding/json.NewDecoder API.
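 //
 // Illustrative sketch (editorial, not upstream code): draining several
 // concatenated JSON values from a reader; strings.NewReader is only an
 // example source:
 //
 //	dec := NewStreamDecoder(strings.NewReader(`{"a":1} {"b":2}`))
 //	for dec.More() {
 //		var m map[string]interface{}
 //		if err := dec.Decode(&m); err != nil {
 //			break
 //		}
 //	}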
 //
 // NewStreamDecoder returns a new decoder that reads from r.
@@ -57,80 +65,53 @@ func NewStreamDecoder(r io.Reader) *StreamDecoder {
 // Either io error from underlying io.Reader (except io.EOF)
 // or syntax error from data will be recorded and stop subsequently decoding.
 func (self *StreamDecoder) Decode(val interface{}) (err error) {
-    if self.err != nil {
-        return self.err
-    }
-
-    var buf = self.buf[self.scanp:]
-    var p = 0
-    var recycle bool
-    if cap(buf) == 0 {
-        buf = bufPool.Get().([]byte)
-        recycle = true
-    }
-
-    var first = true
-    var repeat = true
-read_more:
-    for {
-        l := len(buf)
-        realloc(&buf)
-        n, err := self.r.Read(buf[l:cap(buf)])
-        buf = buf[:l+n]
-        if err != nil {
-            repeat = false
-            if err == io.EOF {
-                if len(buf) == 0 {
-                    return err
-                }
-                break
+    // read more data into buf
+    if self.More() {
+        var s = self.scanp
+    try_skip:
+        var e = len(self.buf)
+        var src = rt.Mem2Str(self.buf[s:e])
+        // try skip
+        var x = 0
+        if y := native.SkipOneFast(&src, &x); y < 0 {
+            if self.readMore() {
+                goto try_skip
+            } else {
+                err = SyntaxError{e, self.s, types.ParsingError(-s), ""}
+                self.setErr(err)
+                return
             }
-            self.err = err
-            return err
+        } else {
+            s = y + s
+            e = x + s
         }
-        if n > 0 || first {
-            break
-        }
-    }
-    first = false
-
-    l := len(buf)
-    if l > 0 {
-        self.Decoder.Reset(string(buf))
+
+        // must copy string here for safety
+        self.Decoder.Reset(string(self.buf[s:e]))
         err = self.Decoder.Decode(val)
         if err != nil {
-            if repeat && self.repeatable(err) {
-                goto read_more
-            }
-            self.err = err
+            self.setErr(err)
+            return
         }
-        p = self.Decoder.Pos()
-        self.scanned += int64(p)
-        self.scanp = 0
-    }
-
-    if l > p {
-        // remain undecoded bytes, so copy them into self.buf
-        self.buf = append(self.buf[:0], buf[p:]...)
-    } else {
-        self.buf = nil
-        recycle = true
-    }
+        self.scanp = e
+        _, empty := self.scan()
+        if empty {
+            // no remain valid bytes, thus we just recycle buffer
+            mem := self.buf
+            self.buf = nil
+            freeBytes(mem)
+        } else {
+            // remain undecoded bytes, move them onto head
+            n := copy(self.buf, self.buf[self.scanp:])
+            self.buf = self.buf[:n]
+        }
 
-    if recycle {
-        buf = buf[:0]
-        bufPool.Put(buf)
-    }
-    return err
-}
+        self.scanned += int64(self.scanp)
+        self.scanp = 0
+    }
 
-func (self StreamDecoder) repeatable(err error) bool {
-    if ee, ok := err.(SyntaxError); ok &&
-        (ee.Code == types.ERR_EOF || (ee.Code == types.ERR_INVALID_CHAR && self.i >= len(self.s)-1)) {
-        return true
-    }
-    return false
+    return self.err
 }
 
 // InputOffset returns the input stream byte offset of the current decoder position.
@@ -155,28 +136,72 @@ func (self *StreamDecoder) More() bool {
     return err == nil && c != ']' && c != '}'
 }
 
+// readMore reads additional bytes from the underlying io.Reader into the
+// buffer, returning true once unscanned non-space data is available.
+func (self *StreamDecoder) readMore() bool {
+    if self.err != nil {
+        return false
+    }
+
+    var err error
+    var n int
+    for {
+        // Grow buffer if not large enough.
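+        // (Editorial note) realloc hands out a pooled buffer when the capacity
+        // is zero, and otherwise grows it (to ~1.5x the length, or double the
+        // capacity) once the free tail drops below cap>>minLeftBufferShift,
+        // so repeated Reads amortize the copying cost.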
+ l := len(self.buf) + realloc(&self.buf) + + n, err = self.r.Read(self.buf[l:cap(self.buf)]) + self.buf = self.buf[: l+n] + + self.scanp = l + _, empty := self.scan() + if !empty { + return true + } + + // buffer has been scanned, now report any error + if err != nil { + self.setErr(err) + return false + } + } +} + +func (self *StreamDecoder) setErr(err error) { + self.err = err + mem := self.buf[:0] + self.buf = nil + freeBytes(mem) +} + func (self *StreamDecoder) peek() (byte, error) { var err error for { - for i := self.scanp; i < len(self.buf); i++ { - c := self.buf[i] - if isSpace(c) { - continue - } - self.scanp = i - return c, nil + c, empty := self.scan() + if !empty { + return byte(c), nil } // buffer has been scanned, now report any error if err != nil { - if err != io.EOF { - self.err = err - } + self.setErr(err) return 0, err } err = self.refill() } } +func (self *StreamDecoder) scan() (byte, bool) { + for i := self.scanp; i < len(self.buf); i++ { + c := self.buf[i] + if isSpace(c) { + continue + } + self.scanp = i + return c, false + } + return 0, true +} + func isSpace(c byte) bool { return types.SPACE_MASK & (1 << c) != 0 } @@ -201,17 +226,23 @@ func (self *StreamDecoder) refill() error { return err } -func realloc(buf *[]byte) { +func realloc(buf *[]byte) bool { l := uint(len(*buf)) c := uint(cap(*buf)) + if c == 0 { + *buf = bufPool.Get().([]byte) + return true + } if c - l <= c >> minLeftBufferShift { e := l+(l>>minLeftBufferShift) - if e < option.DefaultDecoderBufferSize { - e = option.DefaultDecoderBufferSize + if e <= c { + e = c*2 } tmp := make([]byte, l, e) copy(tmp, *buf) *buf = tmp + return true } + return false } diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/assembler_amd64_go116.go b/vendor/github.com/bytedance/sonic/internal/decoder/assembler_amd64_go116.go deleted file mode 100644 index a2618bb3cebcad9aeffdf2a96feab911171ca9e4..0000000000000000000000000000000000000000 --- a/vendor/github.com/bytedance/sonic/internal/decoder/assembler_amd64_go116.go +++ /dev/null @@ -1,2013 +0,0 @@ -// +build go1.15,!go1.17 - -/* - * Copyright 2021 ByteDance Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package decoder - -import ( - `encoding/json` - `fmt` - `math` - `reflect` - `strconv` - `unsafe` - - `github.com/bytedance/sonic/internal/caching` - `github.com/bytedance/sonic/internal/jit` - `github.com/bytedance/sonic/internal/native` - `github.com/bytedance/sonic/internal/native/types` - `github.com/bytedance/sonic/internal/rt` - `github.com/twitchyliquid64/golang-asm/obj` - `github.com/twitchyliquid64/golang-asm/obj/x86` -) - -/** Register Allocations - * - * State Registers: - * - * %rbx : stack base - * %r12 : input pointer - * %r13 : input length - * %r14 : input cursor - * %r15 : value pointer - * - * Error Registers: - * - * %r10 : error type register - * %r11 : error pointer register - */ - -/** Function Prototype & Stack Map - * - * func (s string, ic int, vp unsafe.Pointer, sb *_Stack, fv uint64, sv string) (rc int, err error) - * - * s.buf : (FP) - * s.len : 8(FP) - * ic : 16(FP) - * vp : 24(FP) - * sb : 32(FP) - * fv : 40(FP) - * sv : 56(FP) - * err.vt : 72(FP) - * err.vp : 80(FP) - */ - -const ( - _FP_args = 96 // 96 bytes to pass arguments and return values for this function - _FP_fargs = 80 // 80 bytes for passing arguments to other Go functions - _FP_saves = 40 // 40 bytes for saving the registers before CALL instructions - _FP_locals = 144 // 144 bytes for local variables -) - -const ( - _FP_offs = _FP_fargs + _FP_saves + _FP_locals - _FP_size = _FP_offs + 8 // 8 bytes for the parent frame pointer - _FP_base = _FP_size + 8 // 8 bytes for the return address -) - -const ( - _IM_null = 0x6c6c756e // 'null' - _IM_true = 0x65757274 // 'true' - _IM_alse = 0x65736c61 // 'alse' ('false' without the 'f') -) - -const ( - _BM_space = (1 << ' ') | (1 << '\t') | (1 << '\r') | (1 << '\n') -) - -const ( - _MODE_JSON = 1 << 3 // base64 mode -) - -const ( - _LB_error = "_error" - _LB_im_error = "_im_error" - _LB_eof_error = "_eof_error" - _LB_type_error = "_type_error" - _LB_field_error = "_field_error" - _LB_range_error = "_range_error" - _LB_stack_error = "_stack_error" - _LB_base64_error = "_base64_error" - _LB_unquote_error = "_unquote_error" - _LB_parsing_error = "_parsing_error" - _LB_parsing_error_v = "_parsing_error_v" - _LB_mismatch_error = "_mismatch_error" -) - -const ( - _LB_char_0_error = "_char_0_error" - _LB_char_1_error = "_char_1_error" - _LB_char_2_error = "_char_2_error" - _LB_char_3_error = "_char_3_error" - _LB_char_4_error = "_char_4_error" - _LB_char_m2_error = "_char_m2_error" - _LB_char_m3_error = "_char_m3_error" -) - -const ( - _LB_skip_one = "_skip_one" - _LB_skip_key_value = "_skip_key_value" -) - -var ( - _AX = jit.Reg("AX") - _CX = jit.Reg("CX") - _DX = jit.Reg("DX") - _DI = jit.Reg("DI") - _SI = jit.Reg("SI") - _BP = jit.Reg("BP") - _SP = jit.Reg("SP") - _R8 = jit.Reg("R8") - _R9 = jit.Reg("R9") - _X0 = jit.Reg("X0") - _X1 = jit.Reg("X1") -) - -var ( - _ST = jit.Reg("BX") - _IP = jit.Reg("R12") - _IL = jit.Reg("R13") - _IC = jit.Reg("R14") - _VP = jit.Reg("R15") -) - -var ( - _R10 = jit.Reg("R10") // used for gcWriteBarrier - _DF = jit.Reg("R10") // reuse R10 in generic decoder for flags - _ET = jit.Reg("R10") - _EP = jit.Reg("R11") -) - -var ( - _ARG_s = _ARG_sp - _ARG_sp = jit.Ptr(_SP, _FP_base) - _ARG_sl = jit.Ptr(_SP, _FP_base + 8) - _ARG_ic = jit.Ptr(_SP, _FP_base + 16) - _ARG_vp = jit.Ptr(_SP, _FP_base + 24) - _ARG_sb = jit.Ptr(_SP, _FP_base + 32) - _ARG_fv = jit.Ptr(_SP, _FP_base + 40) -) - -var ( - _VAR_sv = _VAR_sv_p - _VAR_sv_p = jit.Ptr(_SP, _FP_base + 48) - _VAR_sv_n = jit.Ptr(_SP, _FP_base + 56) - _VAR_vk = jit.Ptr(_SP, _FP_base + 
64) -) - -var ( - _RET_rc = jit.Ptr(_SP, _FP_base + 72) - _RET_et = jit.Ptr(_SP, _FP_base + 80) - _RET_ep = jit.Ptr(_SP, _FP_base + 88) -) - -var ( - _VAR_st = _VAR_st_Vt - _VAR_sr = jit.Ptr(_SP, _FP_fargs + _FP_saves) -) - - -var ( - _VAR_st_Vt = jit.Ptr(_SP, _FP_fargs + _FP_saves + 0) - _VAR_st_Dv = jit.Ptr(_SP, _FP_fargs + _FP_saves + 8) - _VAR_st_Iv = jit.Ptr(_SP, _FP_fargs + _FP_saves + 16) - _VAR_st_Ep = jit.Ptr(_SP, _FP_fargs + _FP_saves + 24) - _VAR_st_Db = jit.Ptr(_SP, _FP_fargs + _FP_saves + 32) - _VAR_st_Dc = jit.Ptr(_SP, _FP_fargs + _FP_saves + 40) -) - -var ( - _VAR_ss_AX = jit.Ptr(_SP, _FP_fargs + _FP_saves + 48) - _VAR_ss_CX = jit.Ptr(_SP, _FP_fargs + _FP_saves + 56) - _VAR_ss_SI = jit.Ptr(_SP, _FP_fargs + _FP_saves + 64) - _VAR_ss_R8 = jit.Ptr(_SP, _FP_fargs + _FP_saves + 72) - _VAR_ss_R9 = jit.Ptr(_SP, _FP_fargs + _FP_saves + 80) -) - -var ( - _VAR_bs_p = jit.Ptr(_SP, _FP_fargs + _FP_saves + 88) - _VAR_bs_n = jit.Ptr(_SP, _FP_fargs + _FP_saves + 96) - _VAR_bs_LR = jit.Ptr(_SP, _FP_fargs + _FP_saves + 104) -) - -var _VAR_fl = jit.Ptr(_SP, _FP_fargs + _FP_saves + 112) - -var ( - _VAR_et = jit.Ptr(_SP, _FP_fargs + _FP_saves + 120) // save dismatched type - _VAR_ic = jit.Ptr(_SP, _FP_fargs + _FP_saves + 128) // save dismatched position - _VAR_pc = jit.Ptr(_SP, _FP_fargs + _FP_saves + 136) // save skip return pc -) - -type _Assembler struct { - jit.BaseAssembler - p _Program - name string -} - -func newAssembler(p _Program) *_Assembler { - return new(_Assembler).Init(p) -} - -/** Assembler Interface **/ - -func (self *_Assembler) Load() _Decoder { - return ptodec(self.BaseAssembler.Load("decode_"+self.name, _FP_size, _FP_args, argPtrs, localPtrs)) -} - -func (self *_Assembler) Init(p _Program) *_Assembler { - self.p = p - self.BaseAssembler.Init(self.compile) - return self -} - -func (self *_Assembler) compile() { - self.prologue() - self.instrs() - self.epilogue() - self.copy_string() - self.escape_string() - self.escape_string_twice() - self.skip_one() - self.skip_key_value() - self.mismatch_error() - self.type_error() - self.field_error() - self.range_error() - self.stack_error() - self.base64_error() - self.parsing_error() -} - -/** Assembler Stages **/ - -var _OpFuncTab = [256]func(*_Assembler, *_Instr) { - _OP_any : (*_Assembler)._asm_OP_any, - _OP_dyn : (*_Assembler)._asm_OP_dyn, - _OP_str : (*_Assembler)._asm_OP_str, - _OP_bin : (*_Assembler)._asm_OP_bin, - _OP_bool : (*_Assembler)._asm_OP_bool, - _OP_num : (*_Assembler)._asm_OP_num, - _OP_i8 : (*_Assembler)._asm_OP_i8, - _OP_i16 : (*_Assembler)._asm_OP_i16, - _OP_i32 : (*_Assembler)._asm_OP_i32, - _OP_i64 : (*_Assembler)._asm_OP_i64, - _OP_u8 : (*_Assembler)._asm_OP_u8, - _OP_u16 : (*_Assembler)._asm_OP_u16, - _OP_u32 : (*_Assembler)._asm_OP_u32, - _OP_u64 : (*_Assembler)._asm_OP_u64, - _OP_f32 : (*_Assembler)._asm_OP_f32, - _OP_f64 : (*_Assembler)._asm_OP_f64, - _OP_unquote : (*_Assembler)._asm_OP_unquote, - _OP_nil_1 : (*_Assembler)._asm_OP_nil_1, - _OP_nil_2 : (*_Assembler)._asm_OP_nil_2, - _OP_nil_3 : (*_Assembler)._asm_OP_nil_3, - _OP_deref : (*_Assembler)._asm_OP_deref, - _OP_index : (*_Assembler)._asm_OP_index, - _OP_is_null : (*_Assembler)._asm_OP_is_null, - _OP_is_null_quote : (*_Assembler)._asm_OP_is_null_quote, - _OP_map_init : (*_Assembler)._asm_OP_map_init, - _OP_map_key_i8 : (*_Assembler)._asm_OP_map_key_i8, - _OP_map_key_i16 : (*_Assembler)._asm_OP_map_key_i16, - _OP_map_key_i32 : (*_Assembler)._asm_OP_map_key_i32, - _OP_map_key_i64 : (*_Assembler)._asm_OP_map_key_i64, - _OP_map_key_u8 : 
(*_Assembler)._asm_OP_map_key_u8, - _OP_map_key_u16 : (*_Assembler)._asm_OP_map_key_u16, - _OP_map_key_u32 : (*_Assembler)._asm_OP_map_key_u32, - _OP_map_key_u64 : (*_Assembler)._asm_OP_map_key_u64, - _OP_map_key_f32 : (*_Assembler)._asm_OP_map_key_f32, - _OP_map_key_f64 : (*_Assembler)._asm_OP_map_key_f64, - _OP_map_key_str : (*_Assembler)._asm_OP_map_key_str, - _OP_map_key_utext : (*_Assembler)._asm_OP_map_key_utext, - _OP_map_key_utext_p : (*_Assembler)._asm_OP_map_key_utext_p, - _OP_array_skip : (*_Assembler)._asm_OP_array_skip, - _OP_array_clear : (*_Assembler)._asm_OP_array_clear, - _OP_array_clear_p : (*_Assembler)._asm_OP_array_clear_p, - _OP_slice_init : (*_Assembler)._asm_OP_slice_init, - _OP_slice_append : (*_Assembler)._asm_OP_slice_append, - _OP_object_skip : (*_Assembler)._asm_OP_object_skip, - _OP_object_next : (*_Assembler)._asm_OP_object_next, - _OP_struct_field : (*_Assembler)._asm_OP_struct_field, - _OP_unmarshal : (*_Assembler)._asm_OP_unmarshal, - _OP_unmarshal_p : (*_Assembler)._asm_OP_unmarshal_p, - _OP_unmarshal_text : (*_Assembler)._asm_OP_unmarshal_text, - _OP_unmarshal_text_p : (*_Assembler)._asm_OP_unmarshal_text_p, - _OP_lspace : (*_Assembler)._asm_OP_lspace, - _OP_match_char : (*_Assembler)._asm_OP_match_char, - _OP_check_char : (*_Assembler)._asm_OP_check_char, - _OP_load : (*_Assembler)._asm_OP_load, - _OP_save : (*_Assembler)._asm_OP_save, - _OP_drop : (*_Assembler)._asm_OP_drop, - _OP_drop_2 : (*_Assembler)._asm_OP_drop_2, - _OP_recurse : (*_Assembler)._asm_OP_recurse, - _OP_goto : (*_Assembler)._asm_OP_goto, - _OP_switch : (*_Assembler)._asm_OP_switch, - _OP_check_char_0 : (*_Assembler)._asm_OP_check_char_0, - _OP_dismatch_err : (*_Assembler)._asm_OP_dismatch_err, - _OP_go_skip : (*_Assembler)._asm_OP_go_skip, - _OP_add : (*_Assembler)._asm_OP_add, - _OP_check_empty : (*_Assembler)._asm_OP_check_empty, -} - -func (self *_Assembler) instr(v *_Instr) { - if fn := _OpFuncTab[v.op()]; fn != nil { - fn(self, v) - } else { - panic(fmt.Sprintf("invalid opcode: %d", v.op())) - } -} - -func (self *_Assembler) instrs() { - for i, v := range self.p { - self.Mark(i) - self.instr(&v) - self.debug_instr(i, &v) - } -} - -func (self *_Assembler) epilogue() { - self.Mark(len(self.p)) - self.Emit("XORL", _EP, _EP) // XORL EP, EP - self.Emit("MOVQ", _VAR_et, _ET) // MOVQ VAR_et, ET - self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET - self.Sjmp("JNZ", _LB_mismatch_error) // JNZ _LB_mismatch_error - self.Link(_LB_error) // _error: - self.Emit("MOVQ", _IC, _RET_rc) // MOVQ IC, rc<>+40(FP) - self.Emit("MOVQ", _ET, _RET_et) // MOVQ ET, et<>+48(FP) - self.Emit("MOVQ", _EP, _RET_ep) // MOVQ EP, ep<>+56(FP) - self.Emit("MOVQ", jit.Ptr(_SP, _FP_offs), _BP) // MOVQ _FP_offs(SP), BP - self.Emit("ADDQ", jit.Imm(_FP_size), _SP) // ADDQ $_FP_size, SP - self.Emit("RET") // RET -} - -func (self *_Assembler) prologue() { - self.Emit("SUBQ", jit.Imm(_FP_size), _SP) // SUBQ $_FP_size, SP - self.Emit("MOVQ", _BP, jit.Ptr(_SP, _FP_offs)) // MOVQ BP, _FP_offs(SP) - self.Emit("LEAQ", jit.Ptr(_SP, _FP_offs), _BP) // LEAQ _FP_offs(SP), BP - self.Emit("MOVQ", _ARG_sp, _IP) // MOVQ s.p<>+0(FP), IP - self.Emit("MOVQ", _ARG_sl, _IL) // MOVQ s.l<>+8(FP), IL - self.Emit("MOVQ", _ARG_ic, _IC) // MOVQ ic<>+16(FP), IC - self.Emit("MOVQ", _ARG_vp, _VP) // MOVQ vp<>+24(FP), VP - self.Emit("MOVQ", _ARG_sb, _ST) // MOVQ vp<>+32(FP), ST - // initialize digital buffer first - self.Emit("MOVQ", jit.Imm(_MaxDigitNums), _VAR_st_Dc) // MOVQ $_MaxDigitNums, ss.Dcap - self.Emit("LEAQ", jit.Ptr(_ST, _DbufOffset), _AX) // 
LEAQ _DbufOffset(ST), AX - self.Emit("MOVQ", _AX, _VAR_st_Db) // MOVQ AX, ss.Dbuf - self.Emit("XORL", _AX, _AX) // XORL AX, AX - self.Emit("MOVQ", _AX, _VAR_et) // MOVQ AX, ss.Dp -} - -/** Function Calling Helpers **/ - -var _REG_go = []obj.Addr { - _ST, - _VP, - _IP, - _IL, - _IC, -} - -func (self *_Assembler) save(r ...obj.Addr) { - for i, v := range r { - if i > _FP_saves / 8 - 1 { - panic("too many registers to save") - } else { - self.Emit("MOVQ", v, jit.Ptr(_SP, _FP_fargs + int64(i) * 8)) - } - } -} - -func (self *_Assembler) load(r ...obj.Addr) { - for i, v := range r { - if i > _FP_saves / 8 - 1 { - panic("too many registers to load") - } else { - self.Emit("MOVQ", jit.Ptr(_SP, _FP_fargs + int64(i) * 8), v) - } - } -} - -func (self *_Assembler) call(fn obj.Addr) { - self.Emit("MOVQ", fn, _AX) // MOVQ ${fn}, AX - self.Rjmp("CALL", _AX) // CALL AX -} - -func (self *_Assembler) call_go(fn obj.Addr) { - self.save(_REG_go...) // SAVE $REG_go - self.call(fn) // CALL ${fn} - self.load(_REG_go...) // LOAD $REG_go -} - -func (self *_Assembler) call_sf(fn obj.Addr) { - self.Emit("LEAQ", _ARG_s, _DI) // LEAQ s<>+0(FP), DI - self.Emit("MOVQ", _IC, _ARG_ic) // MOVQ IC, ic<>+16(FP) - self.Emit("LEAQ", _ARG_ic, _SI) // LEAQ ic<>+16(FP), SI - self.Emit("LEAQ", jit.Ptr(_ST, _FsmOffset), _DX) // LEAQ _FsmOffset(ST), DX - self.Emit("MOVQ", _ARG_fv, _CX) - self.call(fn) // CALL ${fn} - self.Emit("MOVQ", _ARG_ic, _IC) // MOVQ ic<>+16(FP), IC -} - -func (self *_Assembler) call_vf(fn obj.Addr) { - self.Emit("LEAQ", _ARG_s, _DI) // LEAQ s<>+0(FP), DI - self.Emit("MOVQ", _IC, _ARG_ic) // MOVQ IC, ic<>+16(FP) - self.Emit("LEAQ", _ARG_ic, _SI) // LEAQ ic<>+16(FP), SI - self.Emit("LEAQ", _VAR_st, _DX) // LEAQ st, DX - self.call(fn) // CALL ${fn} - self.Emit("MOVQ", _ARG_ic, _IC) // MOVQ ic<>+16(FP), IC -} - -/** Assembler Error Handlers **/ - -var ( - _F_convT64 = jit.Func(convT64) - _F_error_wrap = jit.Func(error_wrap) - _F_error_type = jit.Func(error_type) - _F_error_field = jit.Func(error_field) - _F_error_value = jit.Func(error_value) - _F_error_mismatch = jit.Func(error_mismatch) -) - -var ( - _I_int8 , _T_int8 = rtype(reflect.TypeOf(int8(0))) - _I_int16 , _T_int16 = rtype(reflect.TypeOf(int16(0))) - _I_int32 , _T_int32 = rtype(reflect.TypeOf(int32(0))) - _I_uint8 , _T_uint8 = rtype(reflect.TypeOf(uint8(0))) - _I_uint16 , _T_uint16 = rtype(reflect.TypeOf(uint16(0))) - _I_uint32 , _T_uint32 = rtype(reflect.TypeOf(uint32(0))) - _I_float32 , _T_float32 = rtype(reflect.TypeOf(float32(0))) -) - -var ( - _T_error = rt.UnpackType(errorType) - _I_base64_CorruptInputError = jit.Itab(_T_error, base64CorruptInputError) -) - -var ( - _V_stackOverflow = jit.Imm(int64(uintptr(unsafe.Pointer(&stackOverflow)))) - _I_json_UnsupportedValueError = jit.Itab(_T_error, reflect.TypeOf(new(json.UnsupportedValueError))) - _I_json_MismatchTypeError = jit.Itab(_T_error, reflect.TypeOf(new(MismatchTypeError))) -) - -func (self *_Assembler) type_error() { - self.Link(_LB_type_error) // _type_error: - self.Emit("MOVQ", _ET, jit.Ptr(_SP, 0)) // MOVQ ET, (SP) - self.call_go(_F_error_type) // CALL_GO error_type - self.Emit("MOVQ", jit.Ptr(_SP, 8), _ET) // MOVQ 8(SP), ET - self.Emit("MOVQ", jit.Ptr(_SP, 16), _EP) // MOVQ 16(SP), EP - self.Sjmp("JMP" , _LB_error) // JMP _error -} - - -func (self *_Assembler) mismatch_error() { - self.Link(_LB_mismatch_error) // _type_error: - self.Emit("MOVQ", _VAR_et, _ET) // MOVQ _VAR_et, ET - self.Emit("MOVQ", _VAR_ic, _EP) // MOVQ _VAR_ic, EP - self.Emit("MOVQ", _I_json_MismatchTypeError, _AX) // MOVQ 
_I_json_MismatchTypeError, AX - self.Emit("CMPQ", _ET, _AX) // CMPQ ET, AX - self.Sjmp("JE" , _LB_error) // JE _LB_error - self.Emit("MOVQ", _ARG_sp, _AX) - self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP) - self.Emit("MOVQ", _ARG_sl, _CX) - self.Emit("MOVQ", _CX, jit.Ptr(_SP, 8)) // MOVQ CX, 8(SP) - self.Emit("MOVQ", _VAR_ic, _AX) - self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16)) // MOVQ AX, 16(SP) - self.Emit("MOVQ", _VAR_et, _CX) - self.Emit("MOVQ", _CX, jit.Ptr(_SP, 24)) // MOVQ CX, 24(SP) - self.call_go(_F_error_mismatch) // CALL_GO error_type - self.Emit("MOVQ", jit.Ptr(_SP, 32), _ET) // MOVQ 32(SP), ET - self.Emit("MOVQ", jit.Ptr(_SP, 40), _EP) // MOVQ 40(SP), EP - self.Sjmp("JMP" , _LB_error) // JMP _error -} - -func (self *_Assembler) _asm_OP_dismatch_err(p *_Instr) { - self.Emit("MOVQ", _IC, _VAR_ic) - self.Emit("MOVQ", jit.Type(p.vt()), _ET) - self.Emit("MOVQ", _ET, _VAR_et) -} - -func (self *_Assembler) _asm_OP_go_skip(p *_Instr) { - self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9 - self.Xref(p.vi(), 4) - self.Emit("MOVQ", _R9, _VAR_pc) - self.Sjmp("JMP" , _LB_skip_one) // JMP _skip_one -} - -func (self *_Assembler) skip_one() { - self.Link(_LB_skip_one) // _skip: - self.Emit("MOVQ", _VAR_ic, _IC) // MOVQ _VAR_ic, IC - self.call_sf(_F_skip_one) // CALL_SF skip_one - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JS" , _LB_parsing_error_v) // JS _parse_error_v - self.Emit("MOVQ" , _VAR_pc, _R9) // MOVQ pc, R9 - self.Rjmp("JMP" , _R9) // JMP (R9) -} - - -func (self *_Assembler) skip_key_value() { - self.Link(_LB_skip_key_value) // _skip: - // skip the key - self.Emit("MOVQ", _VAR_ic, _IC) // MOVQ _VAR_ic, IC - self.call_sf(_F_skip_one) // CALL_SF skip_one - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JS" , _LB_parsing_error_v) // JS _parse_error_v - // match char ':' - self.lspace("_global_1") - self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(':')) - self.Sjmp("JNE" , _LB_parsing_error_v) // JNE _parse_error_v - self.Emit("ADDQ", jit.Imm(1), _IC) // ADDQ $1, IC - self.lspace("_global_2") - // skip the value - self.call_sf(_F_skip_one) // CALL_SF skip_one - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JS" , _LB_parsing_error_v) // JS _parse_error_v - // jump back to specified address - self.Emit("MOVQ" , _VAR_pc, _R9) // MOVQ pc, R9 - self.Rjmp("JMP" , _R9) // JMP (R9) -} - -func (self *_Assembler) field_error() { - self.Link(_LB_field_error) // _field_error: - self.Emit("MOVOU", _VAR_sv, _X0) // MOVOU sv, X0 - self.Emit("MOVOU", _X0, jit.Ptr(_SP, 0)) // MOVOU X0, (SP) - self.call_go(_F_error_field) // CALL_GO error_field - self.Emit("MOVQ" , jit.Ptr(_SP, 16), _ET) // MOVQ 16(SP), ET - self.Emit("MOVQ" , jit.Ptr(_SP, 24), _EP) // MOVQ 24(SP), EP - self.Sjmp("JMP" , _LB_error) // JMP _error -} - -func (self *_Assembler) range_error() { - self.Link(_LB_range_error) // _range_error: - self.slice_from(_VAR_st_Ep, 0) // SLICE st.Ep, $0 - self.Emit("MOVQ", _DI, jit.Ptr(_SP, 0)) // MOVQ DI, (SP) - self.Emit("MOVQ", _SI, jit.Ptr(_SP, 8)) // MOVQ SI, 8(SP) - self.Emit("MOVQ", _ET, jit.Ptr(_SP, 16)) // MOVQ ET, 16(SP) - self.Emit("MOVQ", _EP, jit.Ptr(_SP, 24)) // MOVQ EP, 24(SP) - self.call_go(_F_error_value) // CALL_GO error_value - self.Emit("MOVQ", jit.Ptr(_SP, 32), _ET) // MOVQ 32(SP), ET - self.Emit("MOVQ", jit.Ptr(_SP, 40), _EP) // MOVQ 40(SP), EP - self.Sjmp("JMP" , _LB_error) // JMP _error -} - -func (self *_Assembler) stack_error() { - self.Link(_LB_stack_error) // _stack_error: - self.Emit("MOVQ", _V_stackOverflow, _EP) // MOVQ 
${_V_stackOverflow}, EP - self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET) // MOVQ ${_I_json_UnsupportedValueError}, ET - self.Sjmp("JMP" , _LB_error) // JMP _error -} - -func (self *_Assembler) base64_error() { - self.Link(_LB_base64_error) - self.Emit("NEGQ", _AX) // NEGQ AX - self.Emit("SUBQ", jit.Imm(1), _AX) // SUBQ $1, AX - self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP) - self.call_go(_F_convT64) // CALL_GO convT64 - self.Emit("MOVQ", jit.Ptr(_SP, 8), _EP) // MOVQ 8(SP), EP - self.Emit("MOVQ", _I_base64_CorruptInputError, _ET) // MOVQ ${itab(base64.CorruptInputError)}, ET - self.Sjmp("JMP" , _LB_error) // JMP _error -} - -func (self *_Assembler) parsing_error() { - self.Link(_LB_eof_error) // _eof_error: - self.Emit("MOVQ" , _IL, _IC) // MOVQ IL, IC - self.Emit("MOVL" , jit.Imm(int64(types.ERR_EOF)), _EP) // MOVL ${types.ERR_EOF}, EP - self.Sjmp("JMP" , _LB_parsing_error) // JMP _parsing_error - self.Link(_LB_unquote_error) // _unquote_error: - self.Emit("SUBQ" , _VAR_sr, _SI) // SUBQ sr, SI - self.Emit("SUBQ" , _SI, _IC) // SUBQ IL, IC - self.Link(_LB_parsing_error_v) // _parsing_error_v: - self.Emit("MOVQ" , _AX, _EP) // MOVQ AX, EP - self.Emit("NEGQ" , _EP) // NEGQ EP - self.Sjmp("JMP" , _LB_parsing_error) // JMP _parsing_error - self.Link(_LB_char_m3_error) // _char_m3_error: - self.Emit("SUBQ" , jit.Imm(1), _IC) // SUBQ $1, IC - self.Link(_LB_char_m2_error) // _char_m2_error: - self.Emit("SUBQ" , jit.Imm(2), _IC) // SUBQ $2, IC - self.Sjmp("JMP" , _LB_char_0_error) // JMP _char_0_error - self.Link(_LB_im_error) // _im_error: - self.Emit("CMPB" , _CX, jit.Sib(_IP, _IC, 1, 0)) // CMPB CX, (IP)(IC) - self.Sjmp("JNE" , _LB_char_0_error) // JNE _char_0_error - self.Emit("SHRL" , jit.Imm(8), _CX) // SHRL $8, CX - self.Emit("CMPB" , _CX, jit.Sib(_IP, _IC, 1, 1)) // CMPB CX, 1(IP)(IC) - self.Sjmp("JNE" , _LB_char_1_error) // JNE _char_1_error - self.Emit("SHRL" , jit.Imm(8), _CX) // SHRL $8, CX - self.Emit("CMPB" , _CX, jit.Sib(_IP, _IC, 1, 2)) // CMPB CX, 2(IP)(IC) - self.Sjmp("JNE" , _LB_char_2_error) // JNE _char_2_error - self.Sjmp("JMP" , _LB_char_3_error) // JNE _char_3_error - self.Link(_LB_char_4_error) // _char_4_error: - self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC - self.Link(_LB_char_3_error) // _char_3_error: - self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC - self.Link(_LB_char_2_error) // _char_2_error: - self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC - self.Link(_LB_char_1_error) // _char_1_error: - self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC - self.Link(_LB_char_0_error) // _char_0_error: - self.Emit("MOVL" , jit.Imm(int64(types.ERR_INVALID_CHAR)), _EP) // MOVL ${types.ERR_INVALID_CHAR}, EP - self.Link(_LB_parsing_error) // _parsing_error: - self.Emit("MOVOU", _ARG_s, _X0) // MOVOU s, X0 - self.Emit("MOVOU", _X0, jit.Ptr(_SP, 0)) // MOVOU X0, (SP) - self.Emit("MOVQ" , _IC, jit.Ptr(_SP, 16)) // MOVQ IC, 16(SP) - self.Emit("MOVQ" , _EP, jit.Ptr(_SP, 24)) // MOVQ EP, 24(SP) - self.call_go(_F_error_wrap) // CALL_GO error_wrap - self.Emit("MOVQ" , jit.Ptr(_SP, 32), _ET) // MOVQ 32(SP), ET - self.Emit("MOVQ" , jit.Ptr(_SP, 40), _EP) // MOVQ 40(SP), EP - self.Sjmp("JMP" , _LB_error) // JMP _error -} - -/** Memory Management Routines **/ - -var ( - _T_byte = jit.Type(byteType) - _F_mallocgc = jit.Func(mallocgc) -) - -func (self *_Assembler) malloc(nb obj.Addr, ret obj.Addr) { - self.Emit("XORL", _AX, _AX) // XORL AX, AX - self.Emit("MOVQ", _T_byte, _CX) // MOVQ ${type(byte)}, CX - self.Emit("MOVQ", nb, jit.Ptr(_SP, 0)) // MOVQ ${nb}, (SP) - 
self.Emit("MOVQ", _CX, jit.Ptr(_SP, 8)) // MOVQ CX, 8(SP) - self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16)) // MOVQ AX, 16(SP) - self.call_go(_F_mallocgc) // CALL_GO mallocgc - self.Emit("MOVQ", jit.Ptr(_SP, 24), ret) // MOVQ 24(SP), ${ret} -} - -func (self *_Assembler) valloc(vt reflect.Type, ret obj.Addr) { - self.Emit("MOVQ", jit.Imm(int64(vt.Size())), _AX) // MOVQ ${vt.Size()}, AX - self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP) - self.Emit("MOVQ", jit.Type(vt), _AX) // MOVQ ${vt}, AX - self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8)) // MOVQ AX, 8(SP) - self.Emit("MOVB", jit.Imm(1), jit.Ptr(_SP, 16)) // MOVB $1, 16(SP) - self.call_go(_F_mallocgc) // CALL_GO mallocgc - self.Emit("MOVQ", jit.Ptr(_SP, 24), ret) // MOVQ 24(SP), ${ret} -} - -func (self *_Assembler) vfollow(vt reflect.Type) { - self.Emit("MOVQ" , jit.Ptr(_VP, 0), _AX) // MOVQ (VP), AX - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JNZ" , "_end_{n}") // JNZ _end_{n} - self.valloc(vt, _AX) // VALLOC ${vt}, AX - self.WritePtrAX(1, jit.Ptr(_VP, 0), false) // MOVQ AX, (VP) - self.Link("_end_{n}") // _end_{n}: - self.Emit("MOVQ" , _AX, _VP) // MOVQ AX, VP -} - -/** Value Parsing Routines **/ - -var ( - _F_vstring = jit.Imm(int64(native.S_vstring)) - _F_vnumber = jit.Imm(int64(native.S_vnumber)) - _F_vsigned = jit.Imm(int64(native.S_vsigned)) - _F_vunsigned = jit.Imm(int64(native.S_vunsigned)) -) - -func (self *_Assembler) check_err(vt reflect.Type, pin string, pin2 int) { - self.Emit("MOVQ" , _VAR_st_Vt, _AX) // MOVQ st.Vt, AX - self.Emit("TESTQ", _AX, _AX) // CMPQ AX, ${native.V_STRING} - // try to skip the value - if vt != nil { - self.Sjmp("JNS" , "_check_err_{n}") // JNE _parsing_error_v - self.Emit("MOVQ", jit.Type(vt), _ET) - self.Emit("MOVQ", _ET, _VAR_et) - if pin2 != -1 { - self.Emit("SUBQ", jit.Imm(1), _BP) - self.Emit("MOVQ", _BP, _VAR_ic) - self.Byte(0x4c , 0x8d, 0x0d) // LEAQ (PC), R9 - self.Xref(pin2, 4) - self.Emit("MOVQ", _R9, _VAR_pc) - self.Sjmp("JMP" , _LB_skip_key_value) - } else { - self.Emit("MOVQ", _BP, _VAR_ic) - self.Byte(0x4c , 0x8d, 0x0d) // LEAQ (PC), R9 - self.Sref(pin, 4) - self.Emit("MOVQ", _R9, _VAR_pc) - self.Sjmp("JMP" , _LB_skip_one) - } - self.Link("_check_err_{n}") - } else { - self.Sjmp("JS" , _LB_parsing_error_v) // JNE _parsing_error_v - } -} - -func (self *_Assembler) check_eof(d int64) { - if d == 1 { - self.Emit("CMPQ", _IC, _IL) // CMPQ IC, IL - self.Sjmp("JAE" , _LB_eof_error) // JAE _eof_error - } else { - self.Emit("LEAQ", jit.Ptr(_IC, d), _AX) // LEAQ ${d}(IC), AX - self.Emit("CMPQ", _AX, _IL) // CMPQ AX, IL - self.Sjmp("JA" , _LB_eof_error) // JA _eof_error - } -} - -func (self *_Assembler) parse_string() { // parse_string has a validate flag params in the last - self.Emit("MOVQ", _ARG_fv, _CX) - self.call_vf(_F_vstring) - self.check_err(nil, "", -1) -} - -func (self *_Assembler) parse_number(vt reflect.Type, pin string, pin2 int) { - self.Emit("MOVQ", _IC, _BP) - self.call_vf(_F_vnumber) // call vnumber - self.check_err(vt, pin, pin2) -} - -func (self *_Assembler) parse_signed(vt reflect.Type, pin string, pin2 int) { - self.Emit("MOVQ", _IC, _BP) - self.call_vf(_F_vsigned) - self.check_err(vt, pin, pin2) -} - -func (self *_Assembler) parse_unsigned(vt reflect.Type, pin string, pin2 int) { - self.Emit("MOVQ", _IC, _BP) - self.call_vf(_F_vunsigned) - self.check_err(vt, pin, pin2) -} - -// Pointer: DI, Size: SI, Return: R9 -func (self *_Assembler) copy_string() { - self.Link("_copy_string") - self.Emit("MOVQ", _DI, _VAR_bs_p) - self.Emit("MOVQ", _SI, _VAR_bs_n) - 
self.Emit("MOVQ", _R9, _VAR_bs_LR) - self.malloc(_SI, _AX) - self.Emit("MOVQ", _AX, _VAR_sv_p) - self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) - self.Emit("MOVQ", _VAR_bs_p, _DI) - self.Emit("MOVQ", _DI, jit.Ptr(_SP, 8)) - self.Emit("MOVQ", _VAR_bs_n, _SI) - self.Emit("MOVQ", _SI, jit.Ptr(_SP, 16)) - self.call_go(_F_memmove) - self.Emit("MOVQ", _VAR_sv_p, _DI) - self.Emit("MOVQ", _VAR_bs_n, _SI) - self.Emit("MOVQ", _VAR_bs_LR, _R9) - self.Rjmp("JMP", _R9) -} - -// Pointer: DI, Size: SI, Return: R9 -func (self *_Assembler) escape_string() { - self.Link("_escape_string") - self.Emit("MOVQ" , _DI, _VAR_bs_p) - self.Emit("MOVQ" , _SI, _VAR_bs_n) - self.Emit("MOVQ" , _R9, _VAR_bs_LR) - self.malloc(_SI, _DX) // MALLOC SI, DX - self.Emit("MOVQ" , _DX, _VAR_sv_p) - self.Emit("MOVQ" , _VAR_bs_p, _DI) - self.Emit("MOVQ" , _VAR_bs_n, _SI) - self.Emit("LEAQ" , _VAR_sr, _CX) // LEAQ sr, CX - self.Emit("XORL" , _R8, _R8) // XORL R8, R8 - self.Emit("BTQ" , jit.Imm(_F_disable_urc), _ARG_fv) // BTQ ${_F_disable_urc}, fv - self.Emit("SETCC", _R8) // SETCC R8 - self.Emit("SHLQ" , jit.Imm(types.B_UNICODE_REPLACE), _R8) // SHLQ ${types.B_UNICODE_REPLACE}, R8 - self.call(_F_unquote) // CALL unquote - self.Emit("MOVQ" , _VAR_bs_n, _SI) // MOVQ ${n}, SI - self.Emit("ADDQ" , jit.Imm(1), _SI) // ADDQ $1, SI - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JS" , _LB_unquote_error) // JS _unquote_error - self.Emit("MOVQ" , _AX, _SI) - self.Emit("MOVQ" , _VAR_sv_p, _DI) - self.Emit("MOVQ" , _VAR_bs_LR, _R9) - self.Rjmp("JMP", _R9) -} - -func (self *_Assembler) escape_string_twice() { - self.Link("_escape_string_twice") - self.Emit("MOVQ" , _DI, _VAR_bs_p) - self.Emit("MOVQ" , _SI, _VAR_bs_n) - self.Emit("MOVQ" , _R9, _VAR_bs_LR) - self.malloc(_SI, _DX) // MALLOC SI, DX - self.Emit("MOVQ" , _DX, _VAR_sv_p) - self.Emit("MOVQ" , _VAR_bs_p, _DI) - self.Emit("MOVQ" , _VAR_bs_n, _SI) - self.Emit("LEAQ" , _VAR_sr, _CX) // LEAQ sr, CX - self.Emit("MOVL" , jit.Imm(types.F_DOUBLE_UNQUOTE), _R8) // MOVL ${types.F_DOUBLE_UNQUOTE}, R8 - self.Emit("BTQ" , jit.Imm(_F_disable_urc), _ARG_fv) // BTQ ${_F_disable_urc}, AX - self.Emit("XORL" , _AX, _AX) // XORL AX, AX - self.Emit("SETCC", _AX) // SETCC AX - self.Emit("SHLQ" , jit.Imm(types.B_UNICODE_REPLACE), _AX) // SHLQ ${types.B_UNICODE_REPLACE}, AX - self.Emit("ORQ" , _AX, _R8) // ORQ AX, R8 - self.call(_F_unquote) // CALL unquote - self.Emit("MOVQ" , _VAR_bs_n, _SI) // MOVQ ${n}, SI - self.Emit("ADDQ" , jit.Imm(3), _SI) // ADDQ $3, SI - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JS" , _LB_unquote_error) // JS _unquote_error - self.Emit("MOVQ" , _AX, _SI) - self.Emit("MOVQ" , _VAR_sv_p, _DI) - self.Emit("MOVQ" , _VAR_bs_LR, _R9) - self.Rjmp("JMP", _R9) -} - -/** Range Checking Routines **/ - -var ( - _V_max_f32 = jit.Imm(int64(uintptr(unsafe.Pointer(_Vp_max_f32)))) - _V_min_f32 = jit.Imm(int64(uintptr(unsafe.Pointer(_Vp_min_f32)))) -) - -var ( - _Vp_max_f32 = new(float64) - _Vp_min_f32 = new(float64) -) - -func init() { - *_Vp_max_f32 = math.MaxFloat32 - *_Vp_min_f32 = -math.MaxFloat32 -} - -func (self *_Assembler) range_single() { - self.Emit("MOVSD" , _VAR_st_Dv, _X0) // MOVSD st.Dv, X0 - self.Emit("MOVQ" , _V_max_f32, _AX) // MOVQ _max_f32, AX - self.Emit("MOVQ" , jit.Gitab(_I_float32), _ET) // MOVQ ${itab(float32)}, ET - self.Emit("MOVQ" , jit.Gtype(_T_float32), _EP) // MOVQ ${type(float32)}, EP - self.Emit("UCOMISD" , jit.Ptr(_AX, 0), _X0) // UCOMISD (AX), X0 - self.Sjmp("JA" , _LB_range_error) // JA _range_error - self.Emit("MOVQ" , _V_min_f32, _AX) // MOVQ 
_min_f32, AX - self.Emit("MOVSD" , jit.Ptr(_AX, 0), _X1) // MOVSD (AX), X1 - self.Emit("UCOMISD" , _X0, _X1) // UCOMISD X0, X1 - self.Sjmp("JA" , _LB_range_error) // JA _range_error - self.Emit("CVTSD2SS", _X0, _X0) // CVTSD2SS X0, X0 -} - -func (self *_Assembler) range_signed(i *rt.GoItab, t *rt.GoType, a int64, b int64) { - self.Emit("MOVQ", _VAR_st_Iv, _AX) // MOVQ st.Iv, AX - self.Emit("MOVQ", jit.Gitab(i), _ET) // MOVQ ${i}, ET - self.Emit("MOVQ", jit.Gtype(t), _EP) // MOVQ ${t}, EP - self.Emit("CMPQ", _AX, jit.Imm(a)) // CMPQ AX, ${a} - self.Sjmp("JL" , _LB_range_error) // JL _range_error - self.Emit("CMPQ", _AX, jit.Imm(b)) // CMPQ AX, ${B} - self.Sjmp("JG" , _LB_range_error) // JG _range_error -} - -func (self *_Assembler) range_unsigned(i *rt.GoItab, t *rt.GoType, v uint64) { - self.Emit("MOVQ" , _VAR_st_Iv, _AX) // MOVQ st.Iv, AX - self.Emit("MOVQ" , jit.Gitab(i), _ET) // MOVQ ${i}, ET - self.Emit("MOVQ" , jit.Gtype(t), _EP) // MOVQ ${t}, EP - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JS" , _LB_range_error) // JS _range_error - self.Emit("CMPQ" , _AX, jit.Imm(int64(v))) // CMPQ AX, ${a} - self.Sjmp("JA" , _LB_range_error) // JA _range_error -} - -/** String Manipulating Routines **/ - -var ( - _F_unquote = jit.Imm(int64(native.S_unquote)) -) - -func (self *_Assembler) slice_from(p obj.Addr, d int64) { - self.Emit("MOVQ", p, _SI) // MOVQ ${p}, SI - self.slice_from_r(_SI, d) // SLICE_R SI, ${d} -} - -func (self *_Assembler) slice_from_r(p obj.Addr, d int64) { - self.Emit("LEAQ", jit.Sib(_IP, p, 1, 0), _DI) // LEAQ (IP)(${p}), DI - self.Emit("NEGQ", p) // NEGQ ${p} - self.Emit("LEAQ", jit.Sib(_IC, p, 1, d), _SI) // LEAQ d(IC)(${p}), SI -} - -func (self *_Assembler) unquote_once(p obj.Addr, n obj.Addr, stack bool, copy bool) { - self.slice_from(_VAR_st_Iv, -1) // SLICE st.Iv, $-1 - self.Emit("CMPQ" , _VAR_st_Ep, jit.Imm(-1)) // CMPQ st.Ep, $-1 - self.Sjmp("JE" , "_noescape_{n}") // JE _noescape_{n} - self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9 - self.Sref("_unquote_once_write_{n}", 4) - self.Sjmp("JMP" , "_escape_string") - self.Link("_noescape_{n}") // _noescape_{n}: - if copy { - self.Emit("BTQ" , jit.Imm(_F_copy_string), _ARG_fv) - self.Sjmp("JNC", "_unquote_once_write_{n}") - self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9 - self.Sref("_unquote_once_write_{n}", 4) - self.Sjmp("JMP", "_copy_string") - } - self.Link("_unquote_once_write_{n}") - self.Emit("MOVQ" , _SI, n) // MOVQ SI, ${n} - if stack { - self.Emit("MOVQ", _DI, p) - } else { - self.WriteRecNotAX(10, _DI, p, false, false) - } -} - -func (self *_Assembler) unquote_twice(p obj.Addr, n obj.Addr, stack bool) { - self.Emit("CMPQ" , _VAR_st_Ep, jit.Imm(-1)) // CMPQ st.Ep, $-1 - self.Sjmp("JE" , _LB_eof_error) // JE _eof_error - self.Emit("CMPB" , jit.Sib(_IP, _IC, 1, -3), jit.Imm('\\')) // CMPB -3(IP)(IC), $'\\' - self.Sjmp("JNE" , _LB_char_m3_error) // JNE _char_m3_error - self.Emit("CMPB" , jit.Sib(_IP, _IC, 1, -2), jit.Imm('"')) // CMPB -2(IP)(IC), $'"' - self.Sjmp("JNE" , _LB_char_m2_error) // JNE _char_m2_error - self.slice_from(_VAR_st_Iv, -3) // SLICE st.Iv, $-3 - self.Emit("MOVQ" , _SI, _AX) // MOVQ SI, AX - self.Emit("ADDQ" , _VAR_st_Iv, _AX) // ADDQ st.Iv, AX - self.Emit("CMPQ" , _VAR_st_Ep, _AX) // CMPQ st.Ep, AX - self.Sjmp("JE" , "_noescape_{n}") // JE _noescape_{n} - self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9 - self.Sref("_unquote_twice_write_{n}", 4) - self.Sjmp("JMP" , "_escape_string_twice") - self.Link("_noescape_{n}") // _noescape_{n}: - self.Emit("BTQ" , jit.Imm(_F_copy_string), _ARG_fv) - 
self.Sjmp("JNC", "_unquote_twice_write_{n}") - self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9 - self.Sref("_unquote_twice_write_{n}", 4) - self.Sjmp("JMP", "_copy_string") - self.Link("_unquote_twice_write_{n}") - self.Emit("MOVQ" , _SI, n) // MOVQ SI, ${n} - if stack { - self.Emit("MOVQ", _DI, p) - } else { - self.WriteRecNotAX(12, _DI, p, false, false) - } -} - -/** Memory Clearing Routines **/ - -var ( - _F_memclrHasPointers = jit.Func(memclrHasPointers) - _F_memclrNoHeapPointers = jit.Func(memclrNoHeapPointers) -) - -func (self *_Assembler) mem_clear_fn(ptrfree bool) { - if !ptrfree { - self.call_go(_F_memclrHasPointers) - } else { - self.call_go(_F_memclrNoHeapPointers) - } -} - -func (self *_Assembler) mem_clear_rem(size int64, ptrfree bool) { - self.Emit("MOVQ", jit.Imm(size), _CX) // MOVQ ${size}, CX - self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX) // MOVQ (ST), AX - self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 0), _AX) // MOVQ (ST)(AX), AX - self.Emit("SUBQ", _VP, _AX) // SUBQ VP, AX - self.Emit("ADDQ", _AX, _CX) // ADDQ AX, CX - self.Emit("MOVQ", _VP, jit.Ptr(_SP, 0)) // MOVQ VP, (SP) - self.Emit("MOVQ", _CX, jit.Ptr(_SP, 8)) // MOVQ CX, 8(SP) - self.mem_clear_fn(ptrfree) // CALL_GO memclr{Has,NoHeap}Pointers -} - -/** Map Assigning Routines **/ - -var ( - _F_mapassign = jit.Func(mapassign) - _F_mapassign_fast32 = jit.Func(mapassign_fast32) - _F_mapassign_faststr = jit.Func(mapassign_faststr) - _F_mapassign_fast64ptr = jit.Func(mapassign_fast64ptr) -) - -var ( - _F_decodeJsonUnmarshaler obj.Addr - _F_decodeTextUnmarshaler obj.Addr -) - -func init() { - _F_decodeJsonUnmarshaler = jit.Func(decodeJsonUnmarshaler) - _F_decodeTextUnmarshaler = jit.Func(decodeTextUnmarshaler) -} - -func (self *_Assembler) mapaccess_ptr(t reflect.Type) { - if rt.MapType(rt.UnpackType(t)).IndirectElem() { - self.vfollow(t.Elem()) - } -} - -func (self *_Assembler) mapassign_std(t reflect.Type, v obj.Addr) { - self.Emit("LEAQ", v, _AX) // LEAQ ${v}, AX - self.mapassign_call(t, _F_mapassign) // MAPASSIGN ${t}, mapassign -} - -func (self *_Assembler) mapassign_str_fast(t reflect.Type, p obj.Addr, n obj.Addr) { - self.Emit("MOVQ", jit.Type(t), _AX) // MOVQ ${t}, AX - self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP) - self.Emit("MOVQ", _VP, jit.Ptr(_SP, 8)) // MOVQ VP, 8(SP) - self.Emit("MOVQ", p, jit.Ptr(_SP, 16)) // MOVQ ${p}, 16(SP) - self.Emit("MOVQ", n, jit.Ptr(_SP, 24)) // MOVQ ${n}, 24(SP) - self.call_go(_F_mapassign_faststr) // CALL_GO ${fn} - self.Emit("MOVQ", jit.Ptr(_SP, 32), _VP) // MOVQ 32(SP), VP - self.mapaccess_ptr(t) -} - -func (self *_Assembler) mapassign_call(t reflect.Type, fn obj.Addr) { - self.Emit("MOVQ", jit.Type(t), _SI) // MOVQ ${t}, SI - self.Emit("MOVQ", _SI, jit.Ptr(_SP, 0)) // MOVQ SI, (SP) - self.Emit("MOVQ", _VP, jit.Ptr(_SP, 8)) // MOVQ VP, 8(SP) - self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16)) // MOVQ AX, 16(SP) - self.call_go(fn) // CALL_GO ${fn} - self.Emit("MOVQ", jit.Ptr(_SP, 24), _VP) // MOVQ 24(SP), VP -} - -func (self *_Assembler) mapassign_fastx(t reflect.Type, fn obj.Addr) { - self.mapassign_call(t, fn) - self.mapaccess_ptr(t) -} - -func (self *_Assembler) mapassign_utext(t reflect.Type, addressable bool) { - pv := false - vk := t.Key() - tk := t.Key() - - /* deref pointer if needed */ - if vk.Kind() == reflect.Ptr { - pv = true - vk = vk.Elem() - } - - /* addressable value with pointer receiver */ - if addressable { - pv = false - tk = reflect.PtrTo(tk) - } - - /* allocate the key, and call the unmarshaler */ - self.valloc(vk, _DI) // VALLOC ${vk}, DI - // must spill vk pointer 
since next call_go may invoke GC - self.Emit("MOVQ" , _DI, _VAR_vk) - self.Emit("MOVQ" , jit.Type(tk), _AX) // MOVQ ${tk}, AX - self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP) - self.Emit("MOVQ" , _DI, jit.Ptr(_SP, 8)) // MOVQ DI, 8(SP) - self.Emit("MOVOU", _VAR_sv, _X0) // MOVOU sv, X0 - self.Emit("MOVOU", _X0, jit.Ptr(_SP, 16)) // MOVOU X0, 16(SP) - self.call_go(_F_decodeTextUnmarshaler) // CALL_GO decodeTextUnmarshaler - self.Emit("MOVQ" , jit.Ptr(_SP, 32), _ET) // MOVQ 32(SP), ET - self.Emit("MOVQ" , jit.Ptr(_SP, 40), _EP) // MOVQ 40(SP), EP - self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET - self.Sjmp("JNZ" , _LB_error) // JNZ _error - self.Emit("MOVQ" , _VAR_vk, _AX) - - /* select the correct assignment function */ - if !pv { - self.mapassign_call(t, _F_mapassign) - } else { - self.mapassign_fastx(t, _F_mapassign_fast64ptr) - } -} - -/** External Unmarshaler Routines **/ - -var ( - _F_skip_one = jit.Imm(int64(native.S_skip_one)) - _F_skip_number = jit.Imm(int64(native.S_skip_number)) -) - -func (self *_Assembler) unmarshal_json(t reflect.Type, deref bool) { - self.call_sf(_F_skip_one) // CALL_SF skip_one - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JS" , _LB_parsing_error_v) // JS _parse_error_v - self.slice_from_r(_AX, 0) // SLICE_R AX, $0 - self.Emit("MOVQ" , _DI, _VAR_sv_p) // MOVQ DI, sv.p - self.Emit("MOVQ" , _SI, _VAR_sv_n) // MOVQ SI, sv.n - self.unmarshal_func(t, _F_decodeJsonUnmarshaler, deref) // UNMARSHAL json, ${t}, ${deref} -} - -func (self *_Assembler) unmarshal_text(t reflect.Type, deref bool) { - self.parse_string() // PARSE STRING - self.unquote_once(_VAR_sv_p, _VAR_sv_n, true, true) // UNQUOTE once, sv.p, sv.n - self.unmarshal_func(t, _F_decodeTextUnmarshaler, deref) // UNMARSHAL text, ${t}, ${deref} -} - -func (self *_Assembler) unmarshal_func(t reflect.Type, fn obj.Addr, deref bool) { - pt := t - vk := t.Kind() - - /* allocate the field if needed */ - if deref && vk == reflect.Ptr { - self.Emit("MOVQ" , _VP, _AX) // MOVQ VP, AX - self.Emit("MOVQ" , jit.Ptr(_AX, 0), _AX) // MOVQ (AX), AX - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JNZ" , "_deref_{n}") // JNZ _deref_{n} - self.valloc(t.Elem(), _AX) // VALLOC ${t.Elem()}, AX - self.WritePtrAX(3, jit.Ptr(_VP, 0), false) // MOVQ AX, (VP) - self.Link("_deref_{n}") // _deref_{n}: - } - - /* set value type */ - self.Emit("MOVQ", jit.Type(pt), _CX) // MOVQ ${pt}, CX - self.Emit("MOVQ", _CX, jit.Ptr(_SP, 0)) // MOVQ CX, (SP) - - /* set value pointer */ - if deref && vk == reflect.Ptr { - self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8)) // MOVQ AX, 8(SP) - } else { - self.Emit("MOVQ", _VP, jit.Ptr(_SP, 8)) // MOVQ VP, 8(SP) - } - - /* set the source string and call the unmarshaler */ - self.Emit("MOVOU", _VAR_sv, _X0) // MOVOU sv, X0 - self.Emit("MOVOU", _X0, jit.Ptr(_SP, 16)) // MOVOU X0, 16(SP) - self.call_go(fn) // CALL_GO ${fn} - self.Emit("MOVQ" , jit.Ptr(_SP, 32), _ET) // MOVQ 32(SP), ET - self.Emit("MOVQ" , jit.Ptr(_SP, 40), _EP) // MOVQ 40(SP), EP - self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET - self.Sjmp("JNZ" , _LB_error) // JNZ _error -} - -/** Dynamic Decoding Routine **/ - -var ( - _F_decodeTypedPointer obj.Addr -) - -func init() { - _F_decodeTypedPointer = jit.Func(decodeTypedPointer) -} - -func (self *_Assembler) decode_dynamic(vt obj.Addr, vp obj.Addr) { - self.Emit("MOVQ" , _ARG_fv, _CX) // MOVQ fv, CX - self.Emit("MOVOU", _ARG_sp, _X0) // MOVOU sp, X0 - self.Emit("MOVOU", _X0, jit.Ptr(_SP, 0)) // MOVOU X0, (SP) - self.Emit("MOVQ" , _IC, jit.Ptr(_SP, 16)) // MOVQ IC, 16(SP) - 
self.Emit("MOVQ" , vt, jit.Ptr(_SP, 24)) // MOVQ ${vt}, 24(SP) - self.Emit("MOVQ" , vp, jit.Ptr(_SP, 32)) // MOVQ ${vp}, 32(SP) - self.Emit("MOVQ" , _ST, jit.Ptr(_SP, 40)) // MOVQ ST, 40(SP) - self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 48)) // MOVQ CX, 48(SP) - self.call_go(_F_decodeTypedPointer) // CALL_GO decodeTypedPointer - self.Emit("MOVQ" , jit.Ptr(_SP, 64), _ET) // MOVQ 64(SP), ET - self.Emit("MOVQ" , jit.Ptr(_SP, 72), _EP) // MOVQ 72(SP), EP - self.Emit("MOVQ" , jit.Ptr(_SP, 56), _IC) // MOVQ 56(SP), IC - self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET - self.Sjmp("JE", "_decode_dynamic_end_{n}") // JE, _decode_dynamic_end_{n} - self.Emit("MOVQ", _I_json_MismatchTypeError, _AX) // MOVQ _I_json_MismatchTypeError, AX - self.Emit("CMPQ", _ET, _AX) // CMPQ ET, AX - self.Sjmp("JNE" , _LB_error) // JNE LB_error - self.Emit("MOVQ", _EP, _VAR_ic) // MOVQ EP, VAR_ic - self.Emit("MOVQ", _ET, _VAR_et) // MOVQ ET, VAR_et - self.Link("_decode_dynamic_end_{n}") - -} - -/** OpCode Assembler Functions **/ - -var ( - _F_memequal = jit.Func(memequal) - _F_memmove = jit.Func(memmove) - _F_growslice = jit.Func(growslice) - _F_makeslice = jit.Func(makeslice) - _F_makemap_small = jit.Func(makemap_small) - _F_mapassign_fast64 = jit.Func(mapassign_fast64) -) - -var ( - _F_lspace = jit.Imm(int64(native.S_lspace)) - _F_strhash = jit.Imm(int64(caching.S_strhash)) -) - -var ( - _F_b64decode = jit.Imm(int64(_subr__b64decode)) - _F_decodeValue = jit.Imm(int64(_subr_decode_value)) -) - -var ( - _F_skip_array = jit.Imm(int64(native.S_skip_array)) - _F_skip_object = jit.Imm(int64(native.S_skip_object)) -) - -var ( - _F_FieldMap_GetCaseInsensitive obj.Addr - _Empty_Slice = make([]byte, 0) - _Zero_Base = int64(uintptr(((*rt.GoSlice)(unsafe.Pointer(&_Empty_Slice))).Ptr)) -) - -const ( - _MODE_AVX2 = 1 << 2 -) - -const ( - _Fe_ID = int64(unsafe.Offsetof(caching.FieldEntry{}.ID)) - _Fe_Name = int64(unsafe.Offsetof(caching.FieldEntry{}.Name)) - _Fe_Hash = int64(unsafe.Offsetof(caching.FieldEntry{}.Hash)) -) - -const ( - _Vk_Ptr = int64(reflect.Ptr) - _Gt_KindFlags = int64(unsafe.Offsetof(rt.GoType{}.KindFlags)) -) - -func init() { - _F_FieldMap_GetCaseInsensitive = jit.Func((*caching.FieldMap).GetCaseInsensitive) -} - -func (self *_Assembler) _asm_OP_any(_ *_Instr) { - self.Emit("MOVQ" , jit.Ptr(_VP, 8), _CX) // MOVQ 8(VP), CX - self.Emit("TESTQ" , _CX, _CX) // TESTQ CX, CX - self.Sjmp("JZ" , "_decode_{n}") // JZ _decode_{n} - self.Emit("CMPQ" , _CX, _VP) // CMPQ CX, VP - self.Sjmp("JE" , "_decode_{n}") // JE _decode_{n} - self.Emit("MOVQ" , jit.Ptr(_VP, 0), _AX) // MOVQ (VP), AX - self.Emit("MOVBLZX", jit.Ptr(_AX, _Gt_KindFlags), _DX) // MOVBLZX _Gt_KindFlags(AX), DX - self.Emit("ANDL" , jit.Imm(rt.F_kind_mask), _DX) // ANDL ${F_kind_mask}, DX - self.Emit("CMPL" , _DX, jit.Imm(_Vk_Ptr)) // CMPL DX, ${reflect.Ptr} - self.Sjmp("JNE" , "_decode_{n}") // JNE _decode_{n} - self.Emit("LEAQ" , jit.Ptr(_VP, 8), _DI) // LEAQ 8(VP), DI - self.decode_dynamic(_AX, _DI) // DECODE AX, DI - self.Sjmp("JMP" , "_decode_end_{n}") // JMP _decode_end_{n} - self.Link("_decode_{n}") // _decode_{n}: - self.Emit("MOVQ" , _ARG_fv, _DF) // MOVQ fv, DF - self.Emit("MOVQ" , _ST, jit.Ptr(_SP, 0)) // MOVQ _ST, (SP) - self.call(_F_decodeValue) // CALL decodeValue - self.Emit("TESTQ" , _EP, _EP) // TESTQ EP, EP - self.Sjmp("JNZ" , _LB_parsing_error) // JNZ _parsing_error - self.Link("_decode_end_{n}") // _decode_end_{n}: -} - -func (self *_Assembler) _asm_OP_dyn(p *_Instr) { - self.Emit("MOVQ" , jit.Type(p.vt()), _ET) // MOVQ ${p.vt()}, ET - self.Emit("CMPQ" 
, jit.Ptr(_VP, 8), jit.Imm(0)) // CMPQ 8(VP), $0 - self.Sjmp("JE" , _LB_type_error) // JE _type_error - self.Emit("MOVQ" , jit.Ptr(_VP, 0), _AX) // MOVQ (VP), AX - self.Emit("MOVQ" , jit.Ptr(_AX, 8), _AX) // MOVQ 8(AX), AX - self.Emit("MOVBLZX", jit.Ptr(_AX, _Gt_KindFlags), _DX) // MOVBLZX _Gt_KindFlags(AX), DX - self.Emit("ANDL" , jit.Imm(rt.F_kind_mask), _DX) // ANDL ${F_kind_mask}, DX - self.Emit("CMPL" , _DX, jit.Imm(_Vk_Ptr)) // CMPL DX, ${reflect.Ptr} - self.Sjmp("JNE" , _LB_type_error) // JNE _type_error - self.Emit("LEAQ" , jit.Ptr(_VP, 8), _DI) // LEAQ 8(VP), DI - self.decode_dynamic(_AX, _DI) // DECODE AX, DI - self.Link("_decode_end_{n}") // _decode_end_{n}: -} - -func (self *_Assembler) _asm_OP_str(_ *_Instr) { - self.parse_string() // PARSE STRING - self.unquote_once(jit.Ptr(_VP, 0), jit.Ptr(_VP, 8), false, true) // UNQUOTE once, (VP), 8(VP) -} - -func (self *_Assembler) _asm_OP_bin(_ *_Instr) { - self.parse_string() // PARSE STRING - self.slice_from(_VAR_st_Iv, -1) // SLICE st.Iv, $-1 - self.Emit("MOVQ" , _DI, jit.Ptr(_VP, 0)) // MOVQ DI, (VP) - self.Emit("MOVQ" , _SI, jit.Ptr(_VP, 8)) // MOVQ SI, 8(VP) - self.Emit("SHRQ" , jit.Imm(2), _SI) // SHRQ $2, SI - self.Emit("LEAQ" , jit.Sib(_SI, _SI, 2, 0), _SI) // LEAQ (SI)(SI*2), SI - self.Emit("MOVQ" , _SI, jit.Ptr(_VP, 16)) // MOVQ SI, 16(VP) - self.malloc(_SI, _SI) // MALLOC SI, SI - - // TODO: due to base64x's bug, only use AVX mode now - self.Emit("MOVL", jit.Imm(_MODE_JSON), _CX) // MOVL $_MODE_JSON, CX - - /* call the decoder */ - self.Emit("XORL" , _DX, _DX) // XORL DX, DX - self.Emit("MOVQ" , _VP, _DI) // MOVQ VP, DI - - self.Emit("MOVQ" , jit.Ptr(_VP, 0), _R9) // MOVQ SI, (VP) - self.WriteRecNotAX(4, _SI, jit.Ptr(_VP, 0), true, false) // XCHGQ SI, (VP) - self.Emit("MOVQ" , _R9, _SI) - - self.Emit("XCHGQ", _DX, jit.Ptr(_VP, 8)) // XCHGQ DX, 8(VP) - self.call(_F_b64decode) // CALL b64decode - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JS" , _LB_base64_error) // JS _base64_error - self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 8)) // MOVQ AX, 8(VP) -} - -func (self *_Assembler) _asm_OP_bool(_ *_Instr) { - self.Emit("LEAQ", jit.Ptr(_IC, 4), _AX) // LEAQ 4(IC), AX - self.Emit("CMPQ", _AX, _IL) // CMPQ AX, IL - self.Sjmp("JA" , _LB_eof_error) // JA _eof_error - self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('f')) // CMPB (IP)(IC), $'f' - self.Sjmp("JE" , "_false_{n}") // JE _false_{n} - self.Emit("MOVL", jit.Imm(_IM_true), _CX) // MOVL $"true", CX - self.Emit("CMPL", _CX, jit.Sib(_IP, _IC, 1, 0)) // CMPL CX, (IP)(IC) - self.Sjmp("JE" , "_bool_true_{n}") - - // try to skip the value - self.Emit("MOVQ", _IC, _VAR_ic) - self.Emit("MOVQ", _T_bool, _ET) - self.Emit("MOVQ", _ET, _VAR_et) - self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9 - self.Sref("_end_{n}", 4) - self.Emit("MOVQ", _R9, _VAR_pc) - self.Sjmp("JMP" , _LB_skip_one) - - self.Link("_bool_true_{n}") - self.Emit("MOVQ", _AX, _IC) // MOVQ AX, IC - self.Emit("MOVB", jit.Imm(1), jit.Ptr(_VP, 0)) // MOVB $1, (VP) - self.Sjmp("JMP" , "_end_{n}") // JMP _end_{n} - self.Link("_false_{n}") // _false_{n}: - self.Emit("ADDQ", jit.Imm(1), _AX) // ADDQ $1, AX - self.Emit("ADDQ", jit.Imm(1), _IC) // ADDQ $1, IC - self.Emit("CMPQ", _AX, _IL) // CMPQ AX, IL - self.Sjmp("JA" , _LB_eof_error) // JA _eof_error - self.Emit("MOVL", jit.Imm(_IM_alse), _CX) // MOVL $"alse", CX - self.Emit("CMPL", _CX, jit.Sib(_IP, _IC, 1, 0)) // CMPL CX, (IP)(IC) - self.Sjmp("JNE" , _LB_im_error) // JNE _im_error - self.Emit("MOVQ", _AX, _IC) // MOVQ AX, IC - self.Emit("XORL", _AX, _AX) // XORL AX, AX - 
self.Emit("MOVB", _AX, jit.Ptr(_VP, 0)) // MOVB AX, (VP) - self.Link("_end_{n}") // _end_{n}: -} - -func (self *_Assembler) _asm_OP_num(_ *_Instr) { - self.Emit("MOVQ", jit.Imm(0), _VAR_fl) - self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('"')) - self.Emit("MOVQ", _IC, _BP) - self.Sjmp("JNE", "_skip_number_{n}") - self.Emit("MOVQ", jit.Imm(1), _VAR_fl) - self.Emit("ADDQ", jit.Imm(1), _IC) - self.Link("_skip_number_{n}") - - /* call skip_number */ - self.call_sf(_F_skip_number) // CALL_SF skip_one - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JNS" , "_num_next_{n}") - - /* call skip one */ - self.Emit("MOVQ", _BP, _VAR_ic) - self.Emit("MOVQ", _T_number, _ET) - self.Emit("MOVQ", _ET, _VAR_et) - self.Byte(0x4c, 0x8d, 0x0d) - self.Sref("_num_end_{n}", 4) - self.Emit("MOVQ", _R9, _VAR_pc) - self.Sjmp("JMP" , _LB_skip_one) - - /* assgin string */ - self.Link("_num_next_{n}") - self.slice_from_r(_AX, 0) - self.Emit("BTQ", jit.Imm(_F_copy_string), _ARG_fv) - self.Sjmp("JNC", "_num_write_{n}") - self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9 - self.Sref("_num_write_{n}", 4) - self.Sjmp("JMP", "_copy_string") - self.Link("_num_write_{n}") - self.Emit("MOVQ", _SI, jit.Ptr(_VP, 8)) // MOVQ SI, 8(VP) - self.WriteRecNotAX(13, _DI, jit.Ptr(_VP, 0), false, false) - - /* check if quoted */ - self.Emit("CMPQ", _VAR_fl, jit.Imm(1)) - self.Sjmp("JNE", "_num_end_{n}") - self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('"')) - self.Sjmp("JNE", _LB_char_0_error) - self.Emit("ADDQ", jit.Imm(1), _IC) - self.Link("_num_end_{n}") -} - -func (self *_Assembler) _asm_OP_i8(ins *_Instr) { - var pin = "_i8_end_{n}" - self.parse_signed(int8Type, pin, -1) // PARSE int8 - self.range_signed(_I_int8, _T_int8, math.MinInt8, math.MaxInt8) // RANGE int8 - self.Emit("MOVB", _AX, jit.Ptr(_VP, 0)) // MOVB AX, (VP) - self.Link(pin) -} - -func (self *_Assembler) _asm_OP_i16(ins *_Instr) { - var pin = "_i16_end_{n}" - self.parse_signed(int16Type, pin, -1) // PARSE int16 - self.range_signed(_I_int16, _T_int16, math.MinInt16, math.MaxInt16) // RANGE int16 - self.Emit("MOVW", _AX, jit.Ptr(_VP, 0)) // MOVW AX, (VP) - self.Link(pin) -} - -func (self *_Assembler) _asm_OP_i32(ins *_Instr) { - var pin = "_i32_end_{n}" - self.parse_signed(int32Type, pin, -1) // PARSE int32 - self.range_signed(_I_int32, _T_int32, math.MinInt32, math.MaxInt32) // RANGE int32 - self.Emit("MOVL", _AX, jit.Ptr(_VP, 0)) // MOVL AX, (VP) - self.Link(pin) -} - -func (self *_Assembler) _asm_OP_i64(ins *_Instr) { - var pin = "_i64_end_{n}" - self.parse_signed(int64Type, pin, -1) // PARSE int64 - self.Emit("MOVQ", _VAR_st_Iv, _AX) // MOVQ st.Iv, AX - self.Emit("MOVQ", _AX, jit.Ptr(_VP, 0)) // MOVQ AX, (VP) - self.Link(pin) -} - -func (self *_Assembler) _asm_OP_u8(ins *_Instr) { - var pin = "_u8_end_{n}" - self.parse_unsigned(uint8Type, pin, -1) // PARSE uint8 - self.range_unsigned(_I_uint8, _T_uint8, math.MaxUint8) // RANGE uint8 - self.Emit("MOVB", _AX, jit.Ptr(_VP, 0)) // MOVB AX, (VP) - self.Link(pin) -} - -func (self *_Assembler) _asm_OP_u16(ins *_Instr) { - var pin = "_u16_end_{n}" - self.parse_unsigned(uint16Type, pin, -1) // PARSE uint16 - self.range_unsigned(_I_uint16, _T_uint16, math.MaxUint16) // RANGE uint16 - self.Emit("MOVW", _AX, jit.Ptr(_VP, 0)) // MOVW AX, (VP) - self.Link(pin) -} - -func (self *_Assembler) _asm_OP_u32(ins *_Instr) { - var pin = "_u32_end_{n}" - self.parse_unsigned(uint32Type, pin, -1) // PARSE uint32 - self.range_unsigned(_I_uint32, _T_uint32, math.MaxUint32) // RANGE uint32 - self.Emit("MOVL", _AX, jit.Ptr(_VP, 0)) // 
MOVL AX, (VP) - self.Link(pin) -} - -func (self *_Assembler) _asm_OP_u64(ins *_Instr) { - var pin = "_u64_end_{n}" - self.parse_unsigned(uint64Type, pin, -1) // PARSE uint64 - self.Emit("MOVQ", _VAR_st_Iv, _AX) // MOVQ st.Iv, AX - self.Emit("MOVQ", _AX, jit.Ptr(_VP, 0)) // MOVQ AX, (VP) - self.Link(pin) -} - -func (self *_Assembler) _asm_OP_f32(ins *_Instr) { - var pin = "_f32_end_{n}" - self.parse_number(float32Type, pin, -1) // PARSE NUMBER - self.range_single() // RANGE float32 - self.Emit("MOVSS", _X0, jit.Ptr(_VP, 0)) // MOVSS X0, (VP) - self.Link(pin) -} - -func (self *_Assembler) _asm_OP_f64(ins *_Instr) { - var pin = "_f64_end_{n}" - self.parse_number(float64Type, pin, -1) // PARSE NUMBER - self.Emit("MOVSD", _VAR_st_Dv, _X0) // MOVSD st.Dv, X0 - self.Emit("MOVSD", _X0, jit.Ptr(_VP, 0)) // MOVSD X0, (VP) - self.Link(pin) -} - -func (self *_Assembler) _asm_OP_unquote(ins *_Instr) { - self.check_eof(2) - self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('\\')) // CMPB (IP)(IC), $'\\' - self.Sjmp("JNE" , _LB_char_0_error) // JNE _char_0_error - self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 1), jit.Imm('"')) // CMPB 1(IP)(IC), $'"' - self.Sjmp("JNE" , _LB_char_1_error) // JNE _char_1_error - self.Emit("ADDQ", jit.Imm(2), _IC) // ADDQ $2, IC - self.parse_string() // PARSE STRING - self.unquote_twice(jit.Ptr(_VP, 0), jit.Ptr(_VP, 8), false) // UNQUOTE twice, (VP), 8(VP) -} - -func (self *_Assembler) _asm_OP_nil_1(_ *_Instr) { - self.Emit("XORL", _AX, _AX) // XORL AX, AX - self.Emit("MOVQ", _AX, jit.Ptr(_VP, 0)) // MOVQ AX, (VP) -} - -func (self *_Assembler) _asm_OP_nil_2(_ *_Instr) { - self.Emit("PXOR" , _X0, _X0) // PXOR X0, X0 - self.Emit("MOVOU", _X0, jit.Ptr(_VP, 0)) // MOVOU X0, (VP) -} - -func (self *_Assembler) _asm_OP_nil_3(_ *_Instr) { - self.Emit("XORL" , _AX, _AX) // XORL AX, AX - self.Emit("PXOR" , _X0, _X0) // PXOR X0, X0 - self.Emit("MOVOU", _X0, jit.Ptr(_VP, 0)) // MOVOU X0, (VP) - self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 16)) // MOVOU X0, 16(VP) -} - -func (self *_Assembler) _asm_OP_deref(p *_Instr) { - self.vfollow(p.vt()) -} - -func (self *_Assembler) _asm_OP_index(p *_Instr) { - self.Emit("MOVQ", jit.Imm(p.i64()), _AX) // MOVQ ${p.vi()}, AX - self.Emit("ADDQ", _AX, _VP) // ADDQ _AX, _VP -} - -func (self *_Assembler) _asm_OP_is_null(p *_Instr) { - self.Emit("LEAQ" , jit.Ptr(_IC, 4), _AX) // LEAQ 4(IC), AX - self.Emit("CMPQ" , _AX, _IL) // CMPQ AX, IL - self.Sjmp("JA" , "_not_null_{n}") // JA _not_null_{n} - self.Emit("CMPL" , jit.Sib(_IP, _IC, 1, 0), jit.Imm(_IM_null)) // CMPL (IP)(IC), $"null" - self.Emit("CMOVQEQ", _AX, _IC) // CMOVQEQ AX, IC - self.Xjmp("JE" , p.vi()) // JE {p.vi()} - self.Link("_not_null_{n}") // _not_null_{n}: -} - -func (self *_Assembler) _asm_OP_is_null_quote(p *_Instr) { - self.Emit("LEAQ" , jit.Ptr(_IC, 5), _AX) // LEAQ 4(IC), AX - self.Emit("CMPQ" , _AX, _IL) // CMPQ AX, IL - self.Sjmp("JA" , "_not_null_quote_{n}") // JA _not_null_quote_{n} - self.Emit("CMPL" , jit.Sib(_IP, _IC, 1, 0), jit.Imm(_IM_null)) // CMPL (IP)(IC), $"null" - self.Sjmp("JNE" , "_not_null_quote_{n}") // JNE _not_null_quote_{n} - self.Emit("CMPB" , jit.Sib(_IP, _IC, 1, 4), jit.Imm('"')) // CMPB 4(IP)(IC), $'"' - self.Emit("CMOVQEQ", _AX, _IC) // CMOVQEQ AX, IC - self.Xjmp("JE" , p.vi()) // JE {p.vi()} - self.Link("_not_null_quote_{n}") // _not_null_quote_{n}: -} - -func (self *_Assembler) _asm_OP_map_init(_ *_Instr) { - self.Emit("MOVQ" , jit.Ptr(_VP, 0), _AX) // MOVQ (VP), AX - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JNZ" , "_end_{n}") // JNZ _end_{n} - 
self.call_go(_F_makemap_small) // CALL_GO makemap_small - self.Emit("MOVQ" , jit.Ptr(_SP, 0), _AX) // MOVQ (SP), AX - self.WritePtrAX(6, jit.Ptr(_VP, 0), false) // MOVQ AX, (VP) - self.Link("_end_{n}") // _end_{n}: - self.Emit("MOVQ" , _AX, _VP) // MOVQ AX, VP -} - -func (self *_Assembler) _asm_OP_map_key_i8(p *_Instr) { - self.parse_signed(int8Type, "", p.vi()) // PARSE int8 - self.range_signed(_I_int8, _T_int8, math.MinInt8, math.MaxInt8) // RANGE int8 - self.match_char('"') - self.mapassign_std(p.vt(), _VAR_st_Iv) // MAPASSIGN int8, mapassign, st.Iv -} - -func (self *_Assembler) _asm_OP_map_key_i16(p *_Instr) { - self.parse_signed(int16Type, "", p.vi()) // PARSE int16 - self.range_signed(_I_int16, _T_int16, math.MinInt16, math.MaxInt16) // RANGE int16 - self.match_char('"') - self.mapassign_std(p.vt(), _VAR_st_Iv) // MAPASSIGN int16, mapassign, st.Iv -} - -func (self *_Assembler) _asm_OP_map_key_i32(p *_Instr) { - self.parse_signed(int32Type, "", p.vi()) // PARSE int32 - self.range_signed(_I_int32, _T_int32, math.MinInt32, math.MaxInt32) // RANGE int32 - self.match_char('"') - if vt := p.vt(); !mapfast(vt) { - self.mapassign_std(vt, _VAR_st_Iv) // MAPASSIGN int32, mapassign, st.Iv - } else { - self.mapassign_fastx(vt, _F_mapassign_fast32) // MAPASSIGN int32, mapassign_fast32 - } -} - -func (self *_Assembler) _asm_OP_map_key_i64(p *_Instr) { - self.parse_signed(int64Type, "", p.vi()) // PARSE int64 - self.match_char('"') - if vt := p.vt(); !mapfast(vt) { - self.mapassign_std(vt, _VAR_st_Iv) // MAPASSIGN int64, mapassign, st.Iv - } else { - self.Emit("MOVQ", _VAR_st_Iv, _AX) // MOVQ st.Iv, AX - self.mapassign_fastx(vt, _F_mapassign_fast64) // MAPASSIGN int64, mapassign_fast64 - } -} - -func (self *_Assembler) _asm_OP_map_key_u8(p *_Instr) { - self.parse_unsigned(uint8Type, "", p.vi()) // PARSE uint8 - self.range_unsigned(_I_uint8, _T_uint8, math.MaxUint8) // RANGE uint8 - self.match_char('"') - self.mapassign_std(p.vt(), _VAR_st_Iv) // MAPASSIGN uint8, vt.Iv -} - -func (self *_Assembler) _asm_OP_map_key_u16(p *_Instr) { - self.parse_unsigned(uint16Type, "", p.vi()) // PARSE uint16 - self.range_unsigned(_I_uint16, _T_uint16, math.MaxUint16) // RANGE uint16 - self.match_char('"') - self.mapassign_std(p.vt(), _VAR_st_Iv) // MAPASSIGN uint16, vt.Iv -} - -func (self *_Assembler) _asm_OP_map_key_u32(p *_Instr) { - self.parse_unsigned(uint32Type, "", p.vi()) // PARSE uint32 - self.range_unsigned(_I_uint32, _T_uint32, math.MaxUint32) // RANGE uint32 - self.match_char('"') - if vt := p.vt(); !mapfast(vt) { - self.mapassign_std(vt, _VAR_st_Iv) // MAPASSIGN uint32, vt.Iv - } else { - self.mapassign_fastx(vt, _F_mapassign_fast32) // MAPASSIGN uint32, mapassign_fast32 - } -} - -func (self *_Assembler) _asm_OP_map_key_u64(p *_Instr) { - self.parse_unsigned(uint64Type, "", p.vi()) // PARSE uint64 - self.match_char('"') - if vt := p.vt(); !mapfast(vt) { - self.mapassign_std(vt, _VAR_st_Iv) // MAPASSIGN uint64, vt.Iv - } else { - self.Emit("MOVQ", _VAR_st_Iv, _AX) // MOVQ st.Iv, AX - self.mapassign_fastx(vt, _F_mapassign_fast64) // MAPASSIGN uint64, mapassign_fast64 - } -} - -func (self *_Assembler) _asm_OP_map_key_f32(p *_Instr) { - self.parse_number(float32Type, "", p.vi()) // PARSE NUMBER - self.range_single() // RANGE float32 - self.Emit("MOVSS", _X0, _VAR_st_Dv) // MOVSS X0, st.Dv - self.match_char('"') - self.mapassign_std(p.vt(), _VAR_st_Dv) // MAPASSIGN ${p.vt()}, mapassign, st.Dv -} - -func (self *_Assembler) _asm_OP_map_key_f64(p *_Instr) { - self.parse_number(float64Type, "", p.vi()) // PARSE 
NUMBER - self.match_char('"') - self.mapassign_std(p.vt(), _VAR_st_Dv) // MAPASSIGN ${p.vt()}, mapassign, st.Dv -} - -func (self *_Assembler) _asm_OP_map_key_str(p *_Instr) { - self.parse_string() // PARSE STRING - self.unquote_once(_VAR_sv_p, _VAR_sv_n, true, true) // UNQUOTE once, sv.p, sv.n - if vt := p.vt(); !mapfast(vt) { - self.valloc(vt.Key(), _DI) - self.Emit("MOVOU", _VAR_sv, _X0) - self.Emit("MOVOU", _X0, jit.Ptr(_DI, 0)) - self.mapassign_std(vt, jit.Ptr(_DI, 0)) - } else { - self.Emit("MOVQ", _VAR_sv_p, _DI) // MOVQ sv.p, DI - self.Emit("MOVQ", _VAR_sv_n, _SI) // MOVQ sv.n, SI - self.mapassign_str_fast(vt, _DI, _SI) // MAPASSIGN string, DI, SI - } -} - -func (self *_Assembler) _asm_OP_map_key_utext(p *_Instr) { - self.parse_string() // PARSE STRING - self.unquote_once(_VAR_sv_p, _VAR_sv_n, true, true) // UNQUOTE once, sv.p, sv.n - self.mapassign_utext(p.vt(), false) // MAPASSIGN utext, ${p.vt()}, false -} - -func (self *_Assembler) _asm_OP_map_key_utext_p(p *_Instr) { - self.parse_string() // PARSE STRING - self.unquote_once(_VAR_sv_p, _VAR_sv_n, true, false) // UNQUOTE once, sv.p, sv.n - self.mapassign_utext(p.vt(), true) // MAPASSIGN utext, ${p.vt()}, true -} - -func (self *_Assembler) _asm_OP_array_skip(_ *_Instr) { - self.call_sf(_F_skip_array) // CALL_SF skip_array - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JS" , _LB_parsing_error_v) // JS _parse_error_v -} - -func (self *_Assembler) _asm_OP_array_clear(p *_Instr) { - self.mem_clear_rem(p.i64(), true) -} - -func (self *_Assembler) _asm_OP_array_clear_p(p *_Instr) { - self.mem_clear_rem(p.i64(), false) -} - -func (self *_Assembler) _asm_OP_slice_init(p *_Instr) { - self.Emit("XORL" , _AX, _AX) // XORL AX, AX - self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 8)) // MOVQ AX, 8(VP) - self.Emit("MOVQ" , jit.Ptr(_VP, 16), _AX) // MOVQ 16(VP), AX - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JNZ" , "_done_{n}") // JNZ _done_{n} - self.Emit("MOVQ" , jit.Imm(_MinSlice), _CX) // MOVQ ${_MinSlice}, CX - self.Emit("MOVQ" , _CX, jit.Ptr(_VP, 16)) // MOVQ CX, 16(VP) - self.Emit("MOVQ" , jit.Type(p.vt()), _DX) // MOVQ ${p.vt()}, DX - self.Emit("MOVQ" , _DX, jit.Ptr(_SP, 0)) // MOVQ DX, (SP) - self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 8)) // MOVQ AX, 8(SP) - self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 16)) // MOVQ CX, 16(SP) - self.call_go(_F_makeslice) // CALL_GO makeslice - self.Emit("MOVQ" , jit.Ptr(_SP, 24), _AX) // MOVQ 24(SP), AX - self.WritePtrAX(7, jit.Ptr(_VP, 0), false) // MOVQ AX, (VP) - self.Link("_done_{n}") // _done_{n}: - self.Emit("XORL" , _AX, _AX) // XORL AX, AX - self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 8)) // MOVQ AX, 8(VP) -} - -func (self *_Assembler) _asm_OP_check_empty(p *_Instr) { - rbracket := p.vb() - if rbracket == ']' { - self.check_eof(1) - self.Emit("LEAQ", jit.Ptr(_IC, 1), _AX) // LEAQ 1(IC), AX - self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(rbracket))) // CMPB (IP)(IC), ']' - self.Sjmp("JNE" , "_not_empty_array_{n}") // JNE _not_empty_array_{n} - self.Emit("MOVQ", _AX, _IC) // MOVQ AX, IC - self.StorePtr(_Zero_Base, jit.Ptr(_VP, 0), _AX) // MOVQ $zerobase, (VP) - self.Emit("PXOR" , _X0, _X0) // PXOR X0, X0 - self.Emit("MOVOU", _X0, jit.Ptr(_VP, 8)) // MOVOU X0, 8(VP) - self.Xjmp("JMP" , p.vi()) // JMP {p.vi()} - self.Link("_not_empty_array_{n}") - } else { - panic("only implement check empty array here!") - } -} - -func (self *_Assembler) _asm_OP_slice_append(p *_Instr) { - self.Emit("MOVQ" , jit.Ptr(_VP, 8), _AX) // MOVQ 8(VP), AX - self.Emit("CMPQ" , _AX, jit.Ptr(_VP, 16)) // CMPQ AX, 
-    self.Sjmp("JB" , "_index_{n}") // JB _index_{n}
-    self.Emit("MOVQ" , jit.Type(p.vt()), _AX) // MOVQ ${p.vt()}, AX
-    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
-    self.Emit("MOVOU", jit.Ptr(_VP, 0), _X0) // MOVOU (VP), X0
-    self.Emit("MOVOU", _X0, jit.Ptr(_SP, 8)) // MOVOU X0, 8(SP)
-    self.Emit("MOVQ" , jit.Ptr(_VP, 16), _AX) // MOVQ 16(VP), AX
-    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 24)) // MOVQ AX, 24(SP)
-    self.Emit("SHLQ" , jit.Imm(1), _AX) // SHLQ $1, AX
-    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 32)) // MOVQ AX, 32(SP)
-    self.call_go(_F_growslice) // CALL_GO growslice
-    self.Emit("MOVQ" , jit.Ptr(_SP, 40), _DI) // MOVQ 40(SP), DI
-    self.Emit("MOVQ" , jit.Ptr(_SP, 48), _AX) // MOVQ 48(SP), AX
-    self.Emit("MOVQ" , jit.Ptr(_SP, 56), _SI) // MOVQ 56(SP), SI
-    self.WriteRecNotAX(8, _DI, jit.Ptr(_VP, 0), true, true)// MOVQ DI, (VP)
-    self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 8)) // MOVQ AX, 8(VP)
-    self.Emit("MOVQ" , _SI, jit.Ptr(_VP, 16)) // MOVQ SI, 16(VP)
-
-    // because growslice not zero memory {oldcap, newlen} when append et not has ptrdata.
-    // but we should zero it, avoid decode it as random values.
-    if rt.UnpackType(p.vt()).PtrData == 0 {
-        self.Emit("SUBQ" , _AX, _SI) // MOVQ AX, SI
-
-        self.Emit("ADDQ" , jit.Imm(1), jit.Ptr(_VP, 8)) // ADDQ $1, 8(VP)
-        self.Emit("MOVQ" , _DI, _VP) // MOVQ DI, VP
-        self.Emit("MOVQ" , jit.Imm(int64(p.vlen())), _CX) // MOVQ ${p.vlen()}, CX
-        self.From("MULQ" , _CX) // MULQ CX
-        self.Emit("ADDQ" , _AX, _VP) // ADDQ AX, VP
-
-        self.Emit("MOVQ" , _SI, _AX) // MOVQ SI, AX
-        self.From("MULQ" , _CX) // MULQ CX
-        self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 8)) // MOVQ AX, 8(SP)
-
-        self.Emit("MOVQ" , _VP, jit.Ptr(_SP, 0)) // MOVQ VP, (SP)
-        self.mem_clear_fn(true) // CALL_GO memclr{Has,NoHeap}
-        self.Sjmp("JMP", "_append_slice_end_{n}") // JMP _append_slice_end_{n}
-    }
-
-    self.Link("_index_{n}") // _index_{n}:
-    self.Emit("ADDQ" , jit.Imm(1), jit.Ptr(_VP, 8)) // ADDQ $1, 8(VP)
-    self.Emit("MOVQ" , jit.Ptr(_VP, 0), _VP) // MOVQ (VP), VP
-    self.Emit("MOVQ" , jit.Imm(int64(p.vlen())), _CX) // MOVQ ${p.vlen()}, CX
-    self.From("MULQ" , _CX) // MULQ CX
-    self.Emit("ADDQ" , _AX, _VP) // ADDQ AX, VP
-    self.Link("_append_slice_end_{n}")
-}
-
-func (self *_Assembler) _asm_OP_object_skip(_ *_Instr) {
-    self.call_sf(_F_skip_object) // CALL_SF skip_object
-    self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
-    self.Sjmp("JS" , _LB_parsing_error_v) // JS _parse_error_v
-}
-
-func (self *_Assembler) _asm_OP_object_next(_ *_Instr) {
-    self.call_sf(_F_skip_one) // CALL_SF skip_one
-    self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
-    self.Sjmp("JS" , _LB_parsing_error_v) // JS _parse_error_v
-}
-
-func (self *_Assembler) _asm_OP_struct_field(p *_Instr) {
-    assert_eq(caching.FieldEntrySize, 32, "invalid field entry size")
-    self.Emit("MOVQ" , jit.Imm(-1), _AX) // MOVQ $-1, AX
-    self.Emit("MOVQ" , _AX, _VAR_sr) // MOVQ AX, sr
-    self.parse_string() // PARSE STRING
-    self.unquote_once(_VAR_sv_p, _VAR_sv_n, true, false) // UNQUOTE once, sv.p, sv.n
-    self.Emit("LEAQ" , _VAR_sv, _AX) // LEAQ sv, AX
-    self.Emit("XORL" , _CX, _CX) // XORL CX, CX
-    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
-    self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 8)) // MOVQ CX, 8(SP)
-    self.call_go(_F_strhash) // CALL_GO strhash
-    self.Emit("MOVQ" , jit.Ptr(_SP, 16), _AX) // MOVQ 16(SP), AX
-    self.Emit("MOVQ" , _AX, _R9) // MOVQ AX, R9
-    self.Emit("MOVQ" , jit.Imm(freezeFields(p.vf())), _CX) // MOVQ ${p.vf()}, CX
-    self.Emit("MOVQ" , jit.Ptr(_CX, caching.FieldMap_b), _SI) // MOVQ FieldMap.b(CX), SI
-    self.Emit("MOVQ" , jit.Ptr(_CX, caching.FieldMap_N), _CX) // MOVQ FieldMap.N(CX), CX
-    self.Emit("TESTQ", _CX, _CX) // TESTQ CX, CX
-    self.Sjmp("JZ" , "_try_lowercase_{n}") // JZ _try_lowercase_{n}
-    self.Link("_loop_{n}") // _loop_{n}:
-    self.Emit("XORL" , _DX, _DX) // XORL DX, DX
-    self.From("DIVQ" , _CX) // DIVQ CX
-    self.Emit("LEAQ" , jit.Ptr(_DX, 1), _AX) // LEAQ 1(DX), AX
-    self.Emit("SHLQ" , jit.Imm(5), _DX) // SHLQ $5, DX
-    self.Emit("LEAQ" , jit.Sib(_SI, _DX, 1, 0), _DI) // LEAQ (SI)(DX), DI
-    self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_Hash), _R8) // MOVQ FieldEntry.Hash(DI), R8
-    self.Emit("TESTQ", _R8, _R8) // TESTQ R8, R8
-    self.Sjmp("JZ" , "_try_lowercase_{n}") // JZ _try_lowercase_{n}
-    self.Emit("CMPQ" , _R8, _R9) // CMPQ R8, R9
-    self.Sjmp("JNE" , "_loop_{n}") // JNE _loop_{n}
-    self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_Name + 8), _DX) // MOVQ FieldEntry.Name+8(DI), DX
-    self.Emit("CMPQ" , _DX, _VAR_sv_n) // CMPQ DX, sv.n
-    self.Sjmp("JNE" , "_loop_{n}") // JNE _loop_{n}
-    self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_ID), _R8) // MOVQ FieldEntry.ID(DI), R8
-    self.Emit("MOVQ" , _AX, _VAR_ss_AX) // MOVQ AX, ss.AX
-    self.Emit("MOVQ" , _CX, _VAR_ss_CX) // MOVQ CX, ss.CX
-    self.Emit("MOVQ" , _SI, _VAR_ss_SI) // MOVQ SI, ss.SI
-    self.Emit("MOVQ" , _R8, _VAR_ss_R8) // MOVQ R8, ss.R8
-    self.Emit("MOVQ" , _R9, _VAR_ss_R9) // MOVQ R9, ss.R9
-    self.Emit("MOVQ" , _VAR_sv_p, _AX) // MOVQ _VAR_sv_p, AX
-    self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_Name), _CX) // MOVQ FieldEntry.Name(DI), CX
-    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
-    self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 8)) // MOVQ CX, 8(SP)
-    self.Emit("MOVQ" , _DX, jit.Ptr(_SP, 16)) // MOVQ DX, 16(SP)
-    self.call_go(_F_memequal) // CALL_GO memequal
-    self.Emit("MOVQ" , _VAR_ss_AX, _AX) // MOVQ ss.AX, AX
-    self.Emit("MOVQ" , _VAR_ss_CX, _CX) // MOVQ ss.CX, CX
-    self.Emit("MOVQ" , _VAR_ss_SI, _SI) // MOVQ ss.SI, SI
-    self.Emit("MOVQ" , _VAR_ss_R9, _R9) // MOVQ ss.R9, R9
-    self.Emit("MOVB" , jit.Ptr(_SP, 24), _DX) // MOVB 24(SP), DX
-    self.Emit("TESTB", _DX, _DX) // TESTB DX, DX
-    self.Sjmp("JZ" , "_loop_{n}") // JZ _loop_{n}
-    self.Emit("MOVQ" , _VAR_ss_R8, _R8) // MOVQ ss.R8, R8
-    self.Emit("MOVQ" , _R8, _VAR_sr) // MOVQ R8, sr
-    self.Sjmp("JMP" , "_end_{n}") // JMP _end_{n}
-    self.Link("_try_lowercase_{n}") // _try_lowercase_{n}:
-    self.Emit("MOVQ" , jit.Imm(referenceFields(p.vf())), _AX) // MOVQ ${p.vf()}, AX
-    self.Emit("MOVOU", _VAR_sv, _X0) // MOVOU sv, X0
-    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
-    self.Emit("MOVOU", _X0, jit.Ptr(_SP, 8)) // MOVOU X0, 8(SP)
-    self.call_go(_F_FieldMap_GetCaseInsensitive) // CALL_GO FieldMap::GetCaseInsensitive
-    self.Emit("MOVQ" , jit.Ptr(_SP, 24), _AX) // MOVQ 24(SP), AX
-    self.Emit("MOVQ" , _AX, _VAR_sr) // MOVQ AX, _VAR_sr
-    self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
-    self.Sjmp("JNS" , "_end_{n}") // JNS _end_{n}
-    self.Emit("BTQ" , jit.Imm(_F_disable_unknown), _ARG_fv) // BTQ ${_F_disable_unknown}, fv
-    self.Sjmp("JC" , _LB_field_error) // JC _field_error
-    self.Link("_end_{n}") // _end_{n}:
-}
-
-func (self *_Assembler) _asm_OP_unmarshal(p *_Instr) {
-    self.unmarshal_json(p.vt(), true)
-}
-
-func (self *_Assembler) _asm_OP_unmarshal_p(p *_Instr) {
-    self.unmarshal_json(p.vt(), false)
-}
-
-func (self *_Assembler) _asm_OP_unmarshal_text(p *_Instr) {
-    self.unmarshal_text(p.vt(), true)
-}
-
-func (self *_Assembler) _asm_OP_unmarshal_text_p(p *_Instr) {
-    self.unmarshal_text(p.vt(), false)
-}
-
-func (self *_Assembler) _asm_OP_lspace(_ *_Instr) {
-    self.lspace("_{n}")
-}
-
-func (self *_Assembler) lspace(subfix string) {
-    var label = "_lspace" + subfix
-
-    self.Emit("CMPQ" , _IC, _IL) // CMPQ IC, IL
-    self.Sjmp("JAE" , _LB_eof_error) // JAE _eof_error
-    self.Emit("MOVQ" , jit.Imm(_BM_space), _DX) // MOVQ _BM_space, DX
-    self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX) // MOVBQZX (IP)(IC), AX
-    self.Emit("CMPQ" , _AX, jit.Imm(' ')) // CMPQ AX, $' '
-    self.Sjmp("JA" , label) // JA _nospace_{n}
-    self.Emit("BTQ" , _AX, _DX) // BTQ AX, DX
-    self.Sjmp("JNC" , label) // JNC _nospace_{n}
-
-    /* test up to 4 characters */
-    for i := 0; i < 3; i++ {
-        self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC
-        self.Emit("CMPQ" , _IC, _IL) // CMPQ IC, IL
-        self.Sjmp("JAE" , _LB_eof_error) // JAE _eof_error
-        self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX) // MOVBQZX (IP)(IC), AX
-        self.Emit("CMPQ" , _AX, jit.Imm(' ')) // CMPQ AX, $' '
-        self.Sjmp("JA" , label) // JA _nospace_{n}
-        self.Emit("BTQ" , _AX, _DX) // BTQ AX, DX
-        self.Sjmp("JNC" , label) // JNC _nospace_{n}
-    }
-
-    /* handle over to the native function */
-    self.Emit("MOVQ" , _IP, _DI) // MOVQ IP, DI
-    self.Emit("MOVQ" , _IL, _SI) // MOVQ IL, SI
-    self.Emit("MOVQ" , _IC, _DX) // MOVQ IC, DX
-    self.call(_F_lspace) // CALL lspace
-    self.Emit("TESTQ" , _AX, _AX) // TESTQ AX, AX
-    self.Sjmp("JS" , _LB_parsing_error_v) // JS _parsing_error_v
-    self.Emit("CMPQ" , _AX, _IL) // CMPQ AX, IL
-    self.Sjmp("JAE" , _LB_eof_error) // JAE _eof_error
-    self.Emit("MOVQ" , _AX, _IC) // MOVQ AX, IC
-    self.Link(label) // _nospace_{n}:
-}
-
-func (self *_Assembler) _asm_OP_match_char(p *_Instr) {
-    self.match_char(p.vb())
-}
-
-func (self *_Assembler) match_char(char byte) {
-    self.check_eof(1)
-    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(char))) // CMPB (IP)(IC), ${p.vb()}
-    self.Sjmp("JNE" , _LB_char_0_error) // JNE _char_0_error
-    self.Emit("ADDQ", jit.Imm(1), _IC) // ADDQ $1, IC
-}
-
-func (self *_Assembler) _asm_OP_check_char(p *_Instr) {
-    self.check_eof(1)
-    self.Emit("LEAQ" , jit.Ptr(_IC, 1), _AX) // LEAQ 1(IC), AX
-    self.Emit("CMPB" , jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(p.vb()))) // CMPB (IP)(IC), ${p.vb()}
-    self.Emit("CMOVQEQ", _AX, _IC) // CMOVQEQ AX, IC
-    self.Xjmp("JE" , p.vi()) // JE {p.vi()}
-}
-
-func (self *_Assembler) _asm_OP_check_char_0(p *_Instr) {
-    self.check_eof(1)
-    self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(p.vb()))) // CMPB (IP)(IC), ${p.vb()}
-    self.Xjmp("JE" , p.vi()) // JE {p.vi()}
-}
-
-func (self *_Assembler) _asm_OP_add(p *_Instr) {
-    self.Emit("ADDQ", jit.Imm(int64(p.vi())), _IC) // ADDQ ${p.vi()}, IC
-}
-
-func (self *_Assembler) _asm_OP_load(_ *_Instr) {
-    self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX) // MOVQ (ST), AX
-    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 0), _VP) // MOVQ (ST)(AX), VP
-}
-
-func (self *_Assembler) _asm_OP_save(_ *_Instr) {
-    self.Emit("MOVQ", jit.Ptr(_ST, 0), _CX) // MOVQ (ST), CX
-    self.Emit("CMPQ", _CX, jit.Imm(_MaxStackBytes)) // CMPQ CX, ${_MaxStackBytes}
-    self.Sjmp("JAE" , _LB_stack_error) // JA _stack_error
-    self.WriteRecNotAX(0 , _VP, jit.Sib(_ST, _CX, 1, 8), false, false) // MOVQ VP, 8(ST)(CX)
-    self.Emit("ADDQ", jit.Imm(8), _CX) // ADDQ $8, CX
-    self.Emit("MOVQ", _CX, jit.Ptr(_ST, 0)) // MOVQ CX, (ST)
-}
-
-func (self *_Assembler) _asm_OP_drop(_ *_Instr) {
-    self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX) // MOVQ (ST), AX
-    self.Emit("SUBQ", jit.Imm(8), _AX) // SUBQ $8, AX
-    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 8), _VP) // MOVQ 8(ST)(AX), VP
-    self.Emit("MOVQ", _AX, jit.Ptr(_ST, 0)) // MOVQ AX, (ST)
-    self.Emit("XORL", _ET, _ET) // XORL ET, ET
-    self.Emit("MOVQ", _ET, jit.Sib(_ST, _AX, 1, 8)) // MOVQ ET, 8(ST)(AX)
-}
-
-func (self *_Assembler) _asm_OP_drop_2(_ *_Instr) {
-    self.Emit("MOVQ" , jit.Ptr(_ST, 0), _AX) // MOVQ (ST), AX
-    self.Emit("SUBQ" , jit.Imm(16), _AX) // SUBQ $16, AX
-    self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 8), _VP) // MOVQ 8(ST)(AX), VP
-    self.Emit("MOVQ" , _AX, jit.Ptr(_ST, 0)) // MOVQ AX, (ST)
-    self.Emit("PXOR" , _X0, _X0) // PXOR X0, X0
-    self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 8)) // MOVOU X0, 8(ST)(AX)
-}
-
-func (self *_Assembler) _asm_OP_recurse(p *_Instr) {
-    self.Emit("MOVQ", jit.Type(p.vt()), _AX) // MOVQ ${p.vt()}, AX
-    self.decode_dynamic(_AX, _VP) // DECODE AX, VP
-}
-
-func (self *_Assembler) _asm_OP_goto(p *_Instr) {
-    self.Xjmp("JMP", p.vi())
-}
-
-func (self *_Assembler) _asm_OP_switch(p *_Instr) {
-    self.Emit("MOVQ", _VAR_sr, _AX) // MOVQ sr, AX
-    self.Emit("CMPQ", _AX, jit.Imm(p.i64())) // CMPQ AX, ${len(p.vs())}
-    self.Sjmp("JAE" , "_default_{n}") // JAE _default_{n}
-
-    /* jump table selector */
-    self.Byte(0x48, 0x8d, 0x3d) // LEAQ ?(PC), DI
-    self.Sref("_switch_table_{n}", 4) // .... &_switch_table_{n}
-    self.Emit("MOVLQSX", jit.Sib(_DI, _AX, 4, 0), _AX) // MOVLQSX (DI)(AX*4), AX
-    self.Emit("ADDQ" , _DI, _AX) // ADDQ DI, AX
-    self.Rjmp("JMP" , _AX) // JMP AX
-    self.Link("_switch_table_{n}") // _switch_table_{n}:
-
-    /* generate the jump table */
-    for i, v := range p.vs() {
-        self.Xref(v, int64(-i) * 4)
-    }
-
-    /* default case */
-    self.Link("_default_{n}")
-    self.NOP()
-}
-
-func (self *_Assembler) print_gc(i int, p1 *_Instr, p2 *_Instr) {
-    self.Emit("MOVQ", jit.Imm(int64(p2.op())), jit.Ptr(_SP, 16))// MOVQ $(p2.op()), 16(SP)
-    self.Emit("MOVQ", jit.Imm(int64(p1.op())), jit.Ptr(_SP, 8)) // MOVQ $(p1.op()), 8(SP)
-    self.Emit("MOVQ", jit.Imm(int64(i)), jit.Ptr(_SP, 0)) // MOVQ $(i), (SP)
-    self.call_go(_F_println)
-}
-
-var _runtime_writeBarrier uintptr = rt.GcwbAddr()
-
-//go:linkname gcWriteBarrierAX runtime.gcWriteBarrier
-func gcWriteBarrierAX()
-
-var (
-    _V_writeBarrier = jit.Imm(int64(_runtime_writeBarrier))
-
-    _F_gcWriteBarrierAX = jit.Func(gcWriteBarrierAX)
-)
-
-func (self *_Assembler) WritePtrAX(i int, rec obj.Addr, saveDI bool) {
-    self.Emit("MOVQ", _V_writeBarrier, _R10)
-    self.Emit("CMPL", jit.Ptr(_R10, 0), jit.Imm(0))
-    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
-    if saveDI {
-        self.save(_DI)
-    }
-    self.Emit("LEAQ", rec, _DI)
-    self.Emit("MOVQ", _F_gcWriteBarrierAX, _R10) // MOVQ ${fn}, AX
-    self.Rjmp("CALL", _R10)
-    if saveDI {
-        self.load(_DI)
-    }
-    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
-    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
-    self.Emit("MOVQ", _AX, rec)
-    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
-}
-
-func (self *_Assembler) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr, saveDI bool, saveAX bool) {
-    if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX {
-        panic("rec contains AX!")
-    }
-    self.Emit("MOVQ", _V_writeBarrier, _R10)
-    self.Emit("CMPL", jit.Ptr(_R10, 0), jit.Imm(0))
-    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
-    if saveAX {
-        self.Emit("XCHGQ", ptr, _AX)
-    } else {
-        self.Emit("MOVQ", ptr, _AX)
-    }
-    if saveDI {
-        self.save(_DI)
-    }
-    self.Emit("LEAQ", rec, _DI)
-    self.Emit("MOVQ", _F_gcWriteBarrierAX, _R10) // MOVQ ${fn}, AX
-    self.Rjmp("CALL", _R10)
-    if saveDI {
-        self.load(_DI)
-    }
-    if saveAX {
-        self.Emit("XCHGQ", ptr, _AX)
-    }
-    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
-    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
"_{n}") - self.Emit("MOVQ", ptr, rec) - self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}") -} \ No newline at end of file diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/consts/option.go b/vendor/github.com/bytedance/sonic/internal/decoder/consts/option.go new file mode 100644 index 0000000000000000000000000000000000000000..4195ebda7c9f09cc1c18d4d1c1857e767f6016f7 --- /dev/null +++ b/vendor/github.com/bytedance/sonic/internal/decoder/consts/option.go @@ -0,0 +1,36 @@ + +package consts + +import ( + `github.com/bytedance/sonic/internal/native/types` +) + + +const ( + F_use_int64 = 0 + F_disable_urc = 2 + F_disable_unknown = 3 + F_copy_string = 4 + + + F_use_number = types.B_USE_NUMBER + F_validate_string = types.B_VALIDATE_STRING + F_allow_control = types.B_ALLOW_CONTROL + F_no_validate_json = types.B_NO_VALIDATE_JSON +) + +type Options uint64 + +const ( + OptionUseInt64 Options = 1 << F_use_int64 + OptionUseNumber Options = 1 << F_use_number + OptionUseUnicodeErrors Options = 1 << F_disable_urc + OptionDisableUnknown Options = 1 << F_disable_unknown + OptionCopyString Options = 1 << F_copy_string + OptionValidateString Options = 1 << F_validate_string + OptionNoValidateJSON Options = 1 << F_no_validate_json +) + +const ( + MaxStack = 4096 +) \ No newline at end of file diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/errors.go b/vendor/github.com/bytedance/sonic/internal/decoder/errors/errors.go similarity index 75% rename from vendor/github.com/bytedance/sonic/internal/decoder/errors.go rename to vendor/github.com/bytedance/sonic/internal/decoder/errors/errors.go index c905fdfb0c670d0a06f5300bc033bc721352a4f1..9f05e8b6a90994a877b07a3297d4b02e61256d45 100644 --- a/vendor/github.com/bytedance/sonic/internal/decoder/errors.go +++ b/vendor/github.com/bytedance/sonic/internal/decoder/errors/errors.go @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -package decoder +package errors import ( `encoding/json` @@ -44,45 +44,55 @@ func (self SyntaxError) Description() string { } func (self SyntaxError) description() string { - i := 16 - p := self.Pos - i - q := self.Pos + i - /* check for empty source */ if self.Src == "" { - return fmt.Sprintf("no sources available: %#v", self) + return fmt.Sprintf("no sources available, the input json is empty: %#v", self) } + p, x, q, y := calcBounds(len(self.Src), self.Pos) + + /* compose the error description */ + return fmt.Sprintf( + "at index %d: %s\n\n\t%s\n\t%s^%s\n", + self.Pos, + self.Message(), + self.Src[p:q], + strings.Repeat(".", x), + strings.Repeat(".", y), + ) +} + +func calcBounds(size int, pos int) (lbound int, lwidth int, rbound int, rwidth int) { + if pos >= size || pos < 0 { + return 0, 0, size, 0 + } + + i := 16 + lbound = pos - i + rbound = pos + i + /* prevent slicing before the beginning */ - if p < 0 { - p, q, i = 0, q - p, i + p + if lbound < 0 { + lbound, rbound, i = 0, rbound - lbound, i + lbound } /* prevent slicing beyond the end */ - if n := len(self.Src); q > n { - n = q - n - q = len(self.Src) + if n := size; rbound > n { + n = rbound - n + rbound = size /* move the left bound if possible */ - if p > n { + if lbound > n { i += n - p -= n + lbound -= n } } /* left and right length */ - x := clamp_zero(i) - y := clamp_zero(q - p - i - 1) + lwidth = clamp_zero(i) + rwidth = clamp_zero(rbound - lbound - i - 1) - /* compose the error description */ - return fmt.Sprintf( - "at index %d: %s\n\n\t%s\n\t%s^%s\n", - self.Pos, - self.Message(), - self.Src[p:q], - strings.Repeat(".", x), - strings.Repeat(".", y), - ) + return } func (self SyntaxError) Message() string { @@ -102,22 +112,25 @@ func clamp_zero(v int) int { /** JIT Error Helpers **/ -var stackOverflow = &json.UnsupportedValueError { +var StackOverflow = &json.UnsupportedValueError { Str : "Value nesting too deep", Value : reflect.ValueOf("..."), } -//go:nosplit -func error_wrap(src string, pos int, code types.ParsingError) error { - return SyntaxError { +func ErrorWrap(src string, pos int, code types.ParsingError) error { + return *error_wrap_heap(src, pos, code) +} + +//go:noinline +func error_wrap_heap(src string, pos int, code types.ParsingError) *SyntaxError { + return &SyntaxError { Pos : pos, Src : src, Code : code, } } -//go:nosplit -func error_type(vt *rt.GoType) error { +func ErrorType(vt *rt.GoType) error { return &json.UnmarshalTypeError{Type: vt.Pack()} } @@ -158,8 +171,7 @@ func (self MismatchTypeError) Description() string { return fmt.Sprintf("Mismatch type %s with value %s %s", self.Type.String(), swithchJSONType(self.Src, self.Pos), se.description()) } -//go:nosplit -func error_mismatch(src string, pos int, vt *rt.GoType) error { +func ErrorMismatch(src string, pos int, vt *rt.GoType) error { return &MismatchTypeError { Pos : pos, Src : src, @@ -167,13 +179,11 @@ func error_mismatch(src string, pos int, vt *rt.GoType) error { } } -//go:nosplit -func error_field(name string) error { +func ErrorField(name string) error { return errors.New("json: unknown field " + strconv.Quote(name)) } -//go:nosplit -func error_value(value string, vtype reflect.Type) error { +func ErrorValue(value string, vtype reflect.Type) error { return &json.UnmarshalTypeError { Type : vtype, Value : value, diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/generic_amd64_go116.go b/vendor/github.com/bytedance/sonic/internal/decoder/generic_amd64_go116.go deleted file mode 100644 index 
--- a/vendor/github.com/bytedance/sonic/internal/decoder/generic_amd64_go116.go
+++ /dev/null
@@ -1,776 +0,0 @@
-// +build go1.15,!go1.17
-
-/*
- * Copyright 2021 ByteDance Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package decoder
-
-import (
-    `encoding/json`
-    `fmt`
-    `reflect`
-    `strconv`
-
-    `github.com/bytedance/sonic/internal/jit`
-    `github.com/bytedance/sonic/internal/native`
-    `github.com/bytedance/sonic/internal/native/types`
-    `github.com/twitchyliquid64/golang-asm/obj`
-    `github.com/twitchyliquid64/golang-asm/obj/x86`
-)
-
-/** Crucial Registers:
- *
- *  ST(BX)  : ro, decoder stack
- *  DF(R10) : ro, decoder flags
- *  EP(R11) : wo, error pointer
- *  IP(R12) : ro, input pointer
- *  IL(R13) : ro, input length
- *  IC(R14) : rw, input cursor
- *  VP(R15) : ro, value pointer (to an interface{})
- */
-
-const (
-    _VD_args = 8 // 8 bytes for passing arguments to this functions
-    _VD_fargs = 64 // 64 bytes for passing arguments to other Go functions
-    _VD_saves = 40 // 40 bytes for saving the registers before CALL instructions
-    _VD_locals = 88 // 88 bytes for local variables
-)
-
-const (
-    _VD_offs = _VD_fargs + _VD_saves + _VD_locals
-    _VD_size = _VD_offs + 8 // 8 bytes for the parent frame pointer
-)
-
-var (
-    _VAR_ss = _VAR_ss_Vt
-    _VAR_df = jit.Ptr(_SP, _VD_fargs + _VD_saves)
-)
-
-var (
-    _VAR_ss_Vt = jit.Ptr(_SP, _VD_fargs + _VD_saves + 8)
-    _VAR_ss_Dv = jit.Ptr(_SP, _VD_fargs + _VD_saves + 16)
-    _VAR_ss_Iv = jit.Ptr(_SP, _VD_fargs + _VD_saves + 24)
-    _VAR_ss_Ep = jit.Ptr(_SP, _VD_fargs + _VD_saves + 32)
-    _VAR_ss_Db = jit.Ptr(_SP, _VD_fargs + _VD_saves + 40)
-    _VAR_ss_Dc = jit.Ptr(_SP, _VD_fargs + _VD_saves + 48)
-)
-
-var (
-    _VAR_cs_LR = jit.Ptr(_SP, _VD_fargs + _VD_saves + 56)
-    _VAR_cs_p = jit.Ptr(_SP, _VD_fargs + _VD_saves + 64)
-    _VAR_cs_n = jit.Ptr(_SP, _VD_fargs + _VD_saves + 72)
-    _VAR_cs_d = jit.Ptr(_SP, _VD_fargs + _VD_saves + 80)
-)
-
-type _ValueDecoder struct {
-    jit.BaseAssembler
-}
-
-func (self *_ValueDecoder) build() uintptr {
-    self.Init(self.compile)
-    return *(*uintptr)(self.Load("decode_value", _VD_size, _VD_args, argPtrs_generic, localPtrs_generic))
-}
-
-/** Function Calling Helpers **/
-
-func (self *_ValueDecoder) save(r ...obj.Addr) {
-    for i, v := range r {
-        if i > _VD_saves / 8 - 1 {
-            panic("too many registers to save")
-        } else {
-            self.Emit("MOVQ", v, jit.Ptr(_SP, _VD_fargs + int64(i) * 8))
-        }
-    }
-}
-
-func (self *_ValueDecoder) load(r ...obj.Addr) {
-    for i, v := range r {
-        if i > _VD_saves / 8 - 1 {
-            panic("too many registers to load")
-        } else {
-            self.Emit("MOVQ", jit.Ptr(_SP, _VD_fargs + int64(i) * 8), v)
-        }
-    }
-}
-
-func (self *_ValueDecoder) call(fn obj.Addr) {
-    self.Emit("MOVQ", fn, _AX) // MOVQ ${fn}, AX
-    self.Rjmp("CALL", _AX) // CALL AX
-}
-
-func (self *_ValueDecoder) call_go(fn obj.Addr) {
-    self.save(_REG_go...) // SAVE $REG_go
-    self.call(fn) // CALL ${fn}
-    self.load(_REG_go...) // LOAD $REG_go
-}
-
-/** Decoder Assembler **/
-
-const (
-    _S_val = iota + 1
-    _S_arr
-    _S_arr_0
-    _S_obj
-    _S_obj_0
-    _S_obj_delim
-    _S_obj_sep
-)
-
-const (
-    _S_omask_key = (1 << _S_obj_0) | (1 << _S_obj_sep)
-    _S_omask_end = (1 << _S_obj_0) | (1 << _S_obj)
-    _S_vmask = (1 << _S_val) | (1 << _S_arr_0)
-)
-
-const (
-    _A_init_len = 1
-    _A_init_cap = 16
-)
-
-const (
-    _ST_Sp = 0
-    _ST_Vt = _PtrBytes
-    _ST_Vp = _PtrBytes * (types.MAX_RECURSE + 1)
-)
-
-var (
-    _V_true = jit.Imm(int64(pbool(true)))
-    _V_false = jit.Imm(int64(pbool(false)))
-    _F_value = jit.Imm(int64(native.S_value))
-)
-
-var (
-    _V_max = jit.Imm(int64(types.V_MAX))
-    _E_eof = jit.Imm(int64(types.ERR_EOF))
-    _E_invalid = jit.Imm(int64(types.ERR_INVALID_CHAR))
-    _E_recurse = jit.Imm(int64(types.ERR_RECURSE_EXCEED_MAX))
-)
-
-var (
-    _F_convTslice = jit.Func(convTslice)
-    _F_convTstring = jit.Func(convTstring)
-    _F_invalid_vtype = jit.Func(invalid_vtype)
-)
-
-var (
-    _T_map = jit.Type(reflect.TypeOf((map[string]interface{})(nil)))
-    _T_bool = jit.Type(reflect.TypeOf(false))
-    _T_int64 = jit.Type(reflect.TypeOf(int64(0)))
-    _T_eface = jit.Type(reflect.TypeOf((*interface{})(nil)).Elem())
-    _T_slice = jit.Type(reflect.TypeOf(([]interface{})(nil)))
-    _T_string = jit.Type(reflect.TypeOf(""))
-    _T_number = jit.Type(reflect.TypeOf(json.Number("")))
-    _T_float64 = jit.Type(reflect.TypeOf(float64(0)))
-)
-
-var _R_tab = map[int]string {
-    '[': "_decode_V_ARRAY",
-    '{': "_decode_V_OBJECT",
-    ':': "_decode_V_KEY_SEP",
-    ',': "_decode_V_ELEM_SEP",
-    ']': "_decode_V_ARRAY_END",
-    '}': "_decode_V_OBJECT_END",
-}
-
-func (self *_ValueDecoder) compile() {
-    self.Emit("SUBQ", jit.Imm(_VD_size), _SP) // SUBQ $_VD_size, SP
-    self.Emit("MOVQ", _BP, jit.Ptr(_SP, _VD_offs)) // MOVQ BP, _VD_offs(SP)
-    self.Emit("LEAQ", jit.Ptr(_SP, _VD_offs), _BP) // LEAQ _VD_offs(SP), BP
-
-    /* initialize the state machine */
-    self.Emit("XORL", _CX, _CX) // XORL CX, CX
-    self.Emit("MOVQ", _DF, _VAR_df) // MOVQ DF, df
-    /* initialize digital buffer first */
-    self.Emit("MOVQ", jit.Imm(_MaxDigitNums), _VAR_ss_Dc) // MOVQ $_MaxDigitNums, ss.Dcap
-    self.Emit("LEAQ", jit.Ptr(_ST, _DbufOffset), _AX) // LEAQ _DbufOffset(ST), AX
-    self.Emit("MOVQ", _AX, _VAR_ss_Db) // MOVQ AX, ss.Dbuf
-    /* add ST offset */
-    self.Emit("ADDQ", jit.Imm(_FsmOffset), _ST) // ADDQ _FsmOffset, _ST
-    self.Emit("MOVQ", _CX, jit.Ptr(_ST, _ST_Sp)) // MOVQ CX, ST.Sp
-    self.WriteRecNotAX(0, _VP, jit.Ptr(_ST, _ST_Vp), false) // MOVQ VP, ST.Vp[0]
-    self.Emit("MOVQ", jit.Imm(_S_val), jit.Ptr(_ST, _ST_Vt)) // MOVQ _S_val, ST.Vt[0]
-    self.Sjmp("JMP" , "_next") // JMP _next
-
-    /* set the value from previous round */
-    self.Link("_set_value") // _set_value:
-    self.Emit("MOVL" , jit.Imm(_S_vmask), _DX) // MOVL _S_vmask, DX
-    self.Emit("MOVQ" , jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
-    self.Emit("MOVQ" , jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
-    self.Emit("BTQ" , _AX, _DX) // BTQ AX, DX
-    self.Sjmp("JNC" , "_vtype_error") // JNC _vtype_error
-    self.Emit("XORL" , _SI, _SI) // XORL SI, SI
-    self.Emit("SUBQ" , jit.Imm(1), jit.Ptr(_ST, _ST_Sp)) // SUBQ $1, ST.Sp
-    self.Emit("XCHGQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // XCHGQ ST.Vp[CX], SI
-    self.Emit("MOVQ" , _R8, jit.Ptr(_SI, 0)) // MOVQ R8, (SI)
-    self.WriteRecNotAX(1, _R9, jit.Ptr(_SI, 8), false) // MOVQ R9, 8(SI)
-
-    /* check for value stack */
-    self.Link("_next") // _next:
-    self.Emit("MOVQ" , jit.Ptr(_ST, _ST_Sp), _AX) // MOVQ ST.Sp, AX
-    self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
-    self.Sjmp("JS" , "_return") // JS _return
-
-    /* fast path: test up to 4 characters manually */
-    self.Emit("CMPQ" , _IC, _IL) // CMPQ IC, IL
-    self.Sjmp("JAE" , "_decode_V_EOF") // JAE _decode_V_EOF
-    self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX) // MOVBQZX (IP)(IC), AX
-    self.Emit("MOVQ" , jit.Imm(_BM_space), _DX) // MOVQ _BM_space, DX
-    self.Emit("CMPQ" , _AX, jit.Imm(' ')) // CMPQ AX, $' '
-    self.Sjmp("JA" , "_decode_fast") // JA _decode_fast
-    self.Emit("BTQ" , _AX, _DX) // BTQ _AX, _DX
-    self.Sjmp("JNC" , "_decode_fast") // JNC _decode_fast
-    self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC
-
-    /* at least 1 to 3 spaces */
-    for i := 0; i < 3; i++ {
-        self.Emit("CMPQ" , _IC, _IL) // CMPQ IC, IL
-        self.Sjmp("JAE" , "_decode_V_EOF") // JAE _decode_V_EOF
-        self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX) // MOVBQZX (IP)(IC), AX
-        self.Emit("CMPQ" , _AX, jit.Imm(' ')) // CMPQ AX, $' '
-        self.Sjmp("JA" , "_decode_fast") // JA _decode_fast
-        self.Emit("BTQ" , _AX, _DX) // BTQ _AX, _DX
-        self.Sjmp("JNC" , "_decode_fast") // JNC _decode_fast
-        self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC
-    }
-
-    /* at least 4 spaces */
-    self.Emit("CMPQ" , _IC, _IL) // CMPQ IC, IL
-    self.Sjmp("JAE" , "_decode_V_EOF") // JAE _decode_V_EOF
-    self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX) // MOVBQZX (IP)(IC), AX
-
-    /* fast path: use lookup table to select decoder */
-    self.Link("_decode_fast") // _decode_fast:
-    self.Byte(0x48, 0x8d, 0x3d) // LEAQ ?(PC), DI
-    self.Sref("_decode_tab", 4) // .... &_decode_tab
-    self.Emit("MOVLQSX", jit.Sib(_DI, _AX, 4, 0), _AX) // MOVLQSX (DI)(AX*4), AX
-    self.Emit("TESTQ" , _AX, _AX) // TESTQ AX, AX
-    self.Sjmp("JZ" , "_decode_native") // JZ _decode_native
-    self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC
-    self.Emit("ADDQ" , _DI, _AX) // ADDQ DI, AX
-    self.Rjmp("JMP" , _AX) // JMP AX
-
-    /* decode with native decoder */
-    self.Link("_decode_native") // _decode_native:
-    self.Emit("MOVQ", _IP, _DI) // MOVQ IP, DI
-    self.Emit("MOVQ", _IL, _SI) // MOVQ IL, SI
-    self.Emit("MOVQ", _IC, _DX) // MOVQ IC, DX
-    self.Emit("LEAQ", _VAR_ss, _CX) // LEAQ ss, CX
-    self.Emit("MOVQ", _VAR_df, _R8) // MOVQ $df, R8
-    self.Emit("BTSQ", jit.Imm(_F_allow_control), _R8) // ANDQ $1<<_F_allow_control, R8
-    self.call(_F_value) // CALL value
-    self.Emit("MOVQ", _AX, _IC) // MOVQ AX, IC
-
-    /* check for errors */
-    self.Emit("MOVQ" , _VAR_ss_Vt, _AX) // MOVQ ss.Vt, AX
-    self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
-    self.Sjmp("JS" , "_parsing_error")
-    self.Sjmp("JZ" , "_invalid_vtype") // JZ _invalid_vtype
-    self.Emit("CMPQ" , _AX, _V_max) // CMPQ AX, _V_max
-    self.Sjmp("JA" , "_invalid_vtype") // JA _invalid_vtype
-
-    /* jump table selector */
-    self.Byte(0x48, 0x8d, 0x3d) // LEAQ ?(PC), DI
-    self.Sref("_switch_table", 4) // .... &_switch_table
-    self.Emit("MOVLQSX", jit.Sib(_DI, _AX, 4, -4), _AX) // MOVLQSX -4(DI)(AX*4), AX
-    self.Emit("ADDQ" , _DI, _AX) // ADDQ DI, AX
-    self.Rjmp("JMP" , _AX) // JMP AX
-
-    /** V_EOF **/
-    self.Link("_decode_V_EOF") // _decode_V_EOF:
-    self.Emit("MOVL", _E_eof, _EP) // MOVL _E_eof, EP
-    self.Sjmp("JMP" , "_error") // JMP _error
-
-    /** V_NULL **/
-    self.Link("_decode_V_NULL") // _decode_V_NULL:
-    self.Emit("XORL", _R8, _R8) // XORL R8, R8
-    self.Emit("XORL", _R9, _R9) // XORL R9, R9
-    self.Emit("LEAQ", jit.Ptr(_IC, -4), _DI) // LEAQ -4(IC), DI
-    self.Sjmp("JMP" , "_set_value") // JMP _set_value
-
-    /** V_TRUE **/
-    self.Link("_decode_V_TRUE") // _decode_V_TRUE:
-    self.Emit("MOVQ", _T_bool, _R8) // MOVQ _T_bool, R8
-    // TODO: maybe modified by users?
-    self.Emit("MOVQ", _V_true, _R9) // MOVQ _V_true, R9
-    self.Emit("LEAQ", jit.Ptr(_IC, -4), _DI) // LEAQ -4(IC), DI
-    self.Sjmp("JMP" , "_set_value") // JMP _set_value
-
-    /** V_FALSE **/
-    self.Link("_decode_V_FALSE") // _decode_V_FALSE:
-    self.Emit("MOVQ", _T_bool, _R8) // MOVQ _T_bool, R8
-    self.Emit("MOVQ", _V_false, _R9) // MOVQ _V_false, R9
-    self.Emit("LEAQ", jit.Ptr(_IC, -5), _DI) // LEAQ -5(IC), DI
-    self.Sjmp("JMP" , "_set_value") // JMP _set_value
-
-    /** V_ARRAY **/
-    self.Link("_decode_V_ARRAY") // _decode_V_ARRAY
-    self.Emit("MOVL", jit.Imm(_S_vmask), _DX) // MOVL _S_vmask, DX
-    self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
-    self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
-    self.Emit("BTQ" , _AX, _DX) // BTQ AX, DX
-    self.Sjmp("JNC" , "_invalid_char") // JNC _invalid_char
-
-    /* create a new array */
-    self.Emit("MOVQ", _T_eface, _AX) // MOVQ _T_eface, AX
-    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
-    self.Emit("MOVQ", jit.Imm(_A_init_len), jit.Ptr(_SP, 8)) // MOVQ _A_init_len, 8(SP)
-    self.Emit("MOVQ", jit.Imm(_A_init_cap), jit.Ptr(_SP, 16)) // MOVQ _A_init_cap, 16(SP)
-    self.call_go(_F_makeslice) // CALL_GO runtime.makeslice
-    self.Emit("MOVQ", jit.Ptr(_SP, 24), _DX) // MOVQ 24(SP), DX
-
-    /* pack into an interface */
-    self.Emit("MOVQ", _DX, jit.Ptr(_SP, 0)) // MOVQ DX, (SP)
-    self.Emit("MOVQ", jit.Imm(_A_init_len), jit.Ptr(_SP, 8)) // MOVQ _A_init_len, 8(SP)
-    self.Emit("MOVQ", jit.Imm(_A_init_cap), jit.Ptr(_SP, 16)) // MOVQ _A_init_cap, 16(SP)
-    self.call_go(_F_convTslice) // CALL_GO runtime.convTslice
-    self.Emit("MOVQ", jit.Ptr(_SP, 24), _R8) // MOVQ 24(SP), R8
-
-    /* replace current state with an array */
-    self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
-    self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // MOVQ ST.Vp[CX], SI
-    self.Emit("MOVQ", jit.Imm(_S_arr), jit.Sib(_ST, _CX, 8, _ST_Vt)) // MOVQ _S_arr, ST.Vt[CX]
-    self.Emit("MOVQ", _T_slice, _AX) // MOVQ _T_slice, AX
-    self.Emit("MOVQ", _AX, jit.Ptr(_SI, 0)) // MOVQ AX, (SI)
-    self.WriteRecNotAX(2, _R8, jit.Ptr(_SI, 8), false) // MOVQ R8, 8(SI)
-
-    /* add a new slot for the first element */
-    self.Emit("ADDQ", jit.Imm(1), _CX) // ADDQ $1, CX
-    self.Emit("CMPQ", _CX, jit.Imm(types.MAX_RECURSE)) // CMPQ CX, ${types.MAX_RECURSE}
-    self.Sjmp("JAE" , "_stack_overflow") // JA _stack_overflow
-    self.Emit("MOVQ", jit.Ptr(_R8, 0), _AX) // MOVQ (R8), AX
-    self.Emit("MOVQ", _CX, jit.Ptr(_ST, _ST_Sp)) // MOVQ CX, ST.Sp
-    self.WritePtrAX(3, jit.Sib(_ST, _CX, 8, _ST_Vp), false) // MOVQ AX, ST.Vp[CX]
-    self.Emit("MOVQ", jit.Imm(_S_arr_0), jit.Sib(_ST, _CX, 8, _ST_Vt)) // MOVQ _S_arr_0, ST.Vt[CX]
-    self.Sjmp("JMP" , "_next") // JMP _next
-
-    /** V_OBJECT **/
-    self.Link("_decode_V_OBJECT") // _decode_V_OBJECT:
-    self.Emit("MOVL", jit.Imm(_S_vmask), _DX) // MOVL _S_vmask, DX
-    self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
-    self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
-    self.Emit("BTQ" , _AX, _DX) // BTQ AX, DX
-    self.Sjmp("JNC" , "_invalid_char") // JNC _invalid_char
-    self.call_go(_F_makemap_small) // CALL_GO runtime.makemap_small
-    self.Emit("MOVQ", jit.Ptr(_SP, 0), _AX) // MOVQ (SP), AX
-    self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
-    self.Emit("MOVQ", jit.Imm(_S_obj_0), jit.Sib(_ST, _CX, 8, _ST_Vt)) // MOVQ _S_obj, ST.Vt[CX]
-    self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // MOVQ ST.Vp[CX], SI
-    self.Emit("MOVQ", _T_map, _DX) // MOVQ _T_map, DX
-    self.Emit("MOVQ", _DX, jit.Ptr(_SI, 0)) // MOVQ DX, (SI)
-    self.WritePtrAX(4, jit.Ptr(_SI, 8), false) // MOVQ AX, 8(SI)
-    self.Sjmp("JMP" , "_next") // JMP _next
-
-    /** V_STRING **/
-    self.Link("_decode_V_STRING") // _decode_V_STRING:
-    self.Emit("MOVQ", _VAR_ss_Iv, _CX) // MOVQ ss.Iv, CX
-    self.Emit("MOVQ", _IC, _AX) // MOVQ IC, AX
-    self.Emit("SUBQ", _CX, _AX) // SUBQ CX, AX
-
-    /* check for escapes */
-    self.Emit("CMPQ", _VAR_ss_Ep, jit.Imm(-1)) // CMPQ ss.Ep, $-1
-    self.Sjmp("JNE" , "_unquote") // JNE _unquote
-    self.Emit("SUBQ", jit.Imm(1), _AX) // SUBQ $1, AX
-    self.Emit("LEAQ", jit.Sib(_IP, _CX, 1, 0), _R8) // LEAQ (IP)(CX), R8
-    self.Byte(0x48, 0x8d, 0x3d) // LEAQ (PC), DI
-    self.Sref("_copy_string_end", 4)
-    self.Emit("BTQ", jit.Imm(_F_copy_string), _VAR_df)
-    self.Sjmp("JC", "copy_string")
-    self.Link("_copy_string_end")
-    self.Emit("XORL", _DX, _DX) // XORL DX, DX
-    /* strings with no escape sequences */
-    self.Link("_noescape") // _noescape:
-    self.Emit("MOVL", jit.Imm(_S_omask_key), _DI) // MOVL _S_omask, DI
-    self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
-    self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _SI) // MOVQ ST.Vt[CX], SI
-    self.Emit("BTQ" , _SI, _DI) // BTQ SI, DI
-    self.Sjmp("JC" , "_object_key") // JC _object_key
-
-    /* check for pre-packed strings, avoid 1 allocation */
-    self.Emit("TESTQ", _DX, _DX) // TESTQ DX, DX
-    self.Sjmp("JNZ" , "_packed_str") // JNZ _packed_str
-    self.Emit("MOVQ" , _R8, jit.Ptr(_SP, 0)) // MOVQ R8, (SP)
-    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 8)) // MOVQ AX, 8(SP)
-    self.call_go(_F_convTstring) // CALL_GO runtime.convTstring
-    self.Emit("MOVQ" , jit.Ptr(_SP, 16), _R9) // MOVQ 16(SP), R9
-
-    /* packed string already in R9 */
-    self.Link("_packed_str") // _packed_str:
-    self.Emit("MOVQ", _T_string, _R8) // MOVQ _T_string, R8
-    self.Emit("MOVQ", _VAR_ss_Iv, _DI) // MOVQ ss.Iv, DI
-    self.Emit("SUBQ", jit.Imm(1), _DI) // SUBQ $1, DI
-    self.Sjmp("JMP" , "_set_value") // JMP _set_value
-
-    /* the string is an object key, get the map */
-    self.Link("_object_key")
-    self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
-    self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // MOVQ ST.Vp[CX], SI
-    self.Emit("MOVQ", jit.Ptr(_SI, 8), _SI) // MOVQ 8(SI), SI
-
-    /* add a new delimiter */
-    self.Emit("ADDQ", jit.Imm(1), _CX) // ADDQ $1, CX
-    self.Emit("CMPQ", _CX, jit.Imm(types.MAX_RECURSE)) // CMPQ CX, ${types.MAX_RECURSE}
-    self.Sjmp("JAE" , "_stack_overflow") // JA _stack_overflow
-    self.Emit("MOVQ", _CX, jit.Ptr(_ST, _ST_Sp)) // MOVQ CX, ST.Sp
-    self.Emit("MOVQ", jit.Imm(_S_obj_delim), jit.Sib(_ST, _CX, 8, _ST_Vt)) // MOVQ _S_obj_delim, ST.Vt[CX]
-
-    /* add a new slot int the map */
-    self.Emit("MOVQ", _T_map, _DX) // MOVQ _T_map, DX
-    self.Emit("MOVQ", _DX, jit.Ptr(_SP, 0)) // MOVQ DX, (SP)
-    self.Emit("MOVQ", _SI, jit.Ptr(_SP, 8)) // MOVQ SI, 8(SP)
-    self.Emit("MOVQ", _R8, jit.Ptr(_SP, 16)) // MOVQ R9, 16(SP)
-    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 24)) // MOVQ AX, 24(SP)
-    self.call_go(_F_mapassign_faststr) // CALL_GO runtime.mapassign_faststr
-    self.Emit("MOVQ", jit.Ptr(_SP, 32), _AX) // MOVQ 32(SP), AX
-
-    /* add to the pointer stack */
-    self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
-    self.WritePtrAX(6, jit.Sib(_ST, _CX, 8, _ST_Vp), false) // MOVQ AX, ST.Vp[CX]
-    self.Sjmp("JMP" , "_next") // JMP _next
-
-    /* allocate memory to store the string header and unquoted result */
-    self.Link("_unquote") // _unquote:
-    self.Emit("ADDQ", jit.Imm(15), _AX) // ADDQ $15, AX
-    self.Emit("MOVQ", _T_byte, _CX) // MOVQ _T_byte, CX
-    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
-    self.Emit("MOVQ", _CX, jit.Ptr(_SP, 8)) // MOVQ CX, 8(SP)
-    self.Emit("MOVB", jit.Imm(0), jit.Ptr(_SP, 16)) // MOVB $0, 16(SP)
-    self.call_go(_F_mallocgc) // CALL_GO runtime.mallocgc
-    self.Emit("MOVQ", jit.Ptr(_SP, 24), _R9) // MOVQ 24(SP), R9
-
-    /* prepare the unquoting parameters */
-    self.Emit("MOVQ" , _VAR_ss_Iv, _CX) // MOVQ ss.Iv, CX
-    self.Emit("LEAQ" , jit.Sib(_IP, _CX, 1, 0), _DI) // LEAQ (IP)(CX), DI
-    self.Emit("NEGQ" , _CX) // NEGQ CX
-    self.Emit("LEAQ" , jit.Sib(_IC, _CX, 1, -1), _SI) // LEAQ -1(IC)(CX), SI
-    self.Emit("LEAQ" , jit.Ptr(_R9, 16), _DX) // LEAQ 16(R8), DX
-    self.Emit("LEAQ" , _VAR_ss_Ep, _CX) // LEAQ ss.Ep, CX
-    self.Emit("XORL" , _R8, _R8) // XORL R8, R8
-    self.Emit("BTQ" , jit.Imm(_F_disable_urc), _VAR_df) // BTQ ${_F_disable_urc}, fv
-    self.Emit("SETCC", _R8) // SETCC R8
-    self.Emit("SHLQ" , jit.Imm(types.B_UNICODE_REPLACE), _R8) // SHLQ ${types.B_UNICODE_REPLACE}, R8
-
-    /* unquote the string, with R9 been preserved */
-    self.save(_R9) // SAVE R9
-    self.call(_F_unquote) // CALL unquote
-    self.load(_R9) // LOAD R9
-
-    /* check for errors */
-    self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
-    self.Sjmp("JS" , "_unquote_error") // JS _unquote_error
-    self.Emit("MOVL" , jit.Imm(1), _DX) // MOVL $1, DX
-    self.Emit("LEAQ" , jit.Ptr(_R9, 16), _R8) // ADDQ $16, R8
-    self.Emit("MOVQ" , _R8, jit.Ptr(_R9, 0)) // MOVQ R8, (R9)
-    self.Emit("MOVQ" , _AX, jit.Ptr(_R9, 8)) // MOVQ AX, 8(R9)
-    self.Sjmp("JMP" , "_noescape") // JMP _noescape
-
-    /** V_DOUBLE **/
-    self.Link("_decode_V_DOUBLE") // _decode_V_DOUBLE:
-    self.Emit("BTQ" , jit.Imm(_F_use_number), _VAR_df) // BTQ _F_use_number, df
-    self.Sjmp("JC" , "_use_number") // JC _use_number
-    self.Emit("MOVSD", _VAR_ss_Dv, _X0) // MOVSD ss.Dv, X0
-    self.Sjmp("JMP" , "_use_float64") // JMP _use_float64
-
-    /** V_INTEGER **/
-    self.Link("_decode_V_INTEGER") // _decode_V_INTEGER:
-    self.Emit("BTQ" , jit.Imm(_F_use_number), _VAR_df) // BTQ _F_use_number, df
-    self.Sjmp("JC" , "_use_number") // JC _use_number
-    self.Emit("BTQ" , jit.Imm(_F_use_int64), _VAR_df) // BTQ _F_use_int64, df
-    self.Sjmp("JC" , "_use_int64") // JC _use_int64
-    self.Emit("MOVQ" , _VAR_ss_Iv, _AX) // MOVQ ss.Iv, AX
-    self.Emit("CVTSQ2SD", _AX, _X0) // CVTSQ2SD AX, X0
-
-    /* represent numbers as `float64` */
-    self.Link("_use_float64") // _use_float64:
-    self.Emit("MOVSD", _X0, jit.Ptr(_SP, 0)) // MOVSD X0, (SP)
-    self.call_go(_F_convT64) // CALL_GO runtime.convT64
-    self.Emit("MOVQ" , _T_float64, _R8) // MOVQ _T_float64, R8
-    self.Emit("MOVQ" , jit.Ptr(_SP, 8), _R9) // MOVQ 8(SP), R9
-    self.Emit("MOVQ" , _VAR_ss_Ep, _DI) // MOVQ ss.Ep, DI
-    self.Sjmp("JMP" , "_set_value") // JMP _set_value
-
-    /* represent numbers as `json.Number` */
-    self.Link("_use_number") // _use_number
-    self.Emit("MOVQ", _VAR_ss_Ep, _AX) // MOVQ ss.Ep, AX
-    self.Emit("LEAQ", jit.Sib(_IP, _AX, 1, 0), _SI) // LEAQ (IP)(AX), SI
-    self.Emit("MOVQ", _IC, _CX) // MOVQ IC, CX
-    self.Emit("SUBQ", _AX, _CX) // SUBQ AX, CX
-    self.Emit("MOVQ", _SI, jit.Ptr(_SP, 0)) // MOVQ SI, (SP)
-    self.Emit("MOVQ", _CX, jit.Ptr(_SP, 8)) // MOVQ CX, 8(SP)
-    self.call_go(_F_convTstring) // CALL_GO runtime.convTstring
-    self.Emit("MOVQ", _T_number, _R8) // MOVQ _T_number, R8
-    self.Emit("MOVQ", jit.Ptr(_SP, 16), _R9) // MOVQ 16(SP), R9
-    self.Emit("MOVQ", _VAR_ss_Ep, _DI) // MOVQ ss.Ep, DI
-    self.Sjmp("JMP" , "_set_value") // JMP _set_value
-
-    /* represent numbers as `int64` */
-    self.Link("_use_int64") // _use_int64:
-    self.Emit("MOVQ", _VAR_ss_Iv, _AX) // MOVQ ss.Iv, AX
-    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
-    self.call_go(_F_convT64) // CALL_GO runtime.convT64
-    self.Emit("MOVQ", _T_int64, _R8) // MOVQ _T_int64, R8
-    self.Emit("MOVQ", jit.Ptr(_SP, 8), _R9) // MOVQ 8(SP), R9
-    self.Emit("MOVQ", _VAR_ss_Ep, _DI) // MOVQ ss.Ep, DI
-    self.Sjmp("JMP" , "_set_value") // JMP _set_value
-
-    /** V_KEY_SEP **/
-    self.Link("_decode_V_KEY_SEP") // _decode_V_KEY_SEP:
-    // self.Byte(0xcc)
-    self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
-    self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
-    self.Emit("CMPQ", _AX, jit.Imm(_S_obj_delim)) // CMPQ AX, _S_obj_delim
-    self.Sjmp("JNE" , "_invalid_char") // JNE _invalid_char
-    self.Emit("MOVQ", jit.Imm(_S_val), jit.Sib(_ST, _CX, 8, _ST_Vt)) // MOVQ _S_val, ST.Vt[CX]
-    self.Emit("MOVQ", jit.Imm(_S_obj), jit.Sib(_ST, _CX, 8, _ST_Vt - 8)) // MOVQ _S_obj, ST.Vt[CX - 1]
-    self.Sjmp("JMP" , "_next") // JMP _next
-
-    /** V_ELEM_SEP **/
-    self.Link("_decode_V_ELEM_SEP") // _decode_V_ELEM_SEP:
-    self.Emit("MOVQ" , jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
-    self.Emit("MOVQ" , jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
-    self.Emit("CMPQ" , _AX, jit.Imm(_S_arr)) // CMPQ _AX, _S_arr
-    self.Sjmp("JE" , "_array_sep") // JZ _next
-    self.Emit("CMPQ" , _AX, jit.Imm(_S_obj)) // CMPQ _AX, _S_arr
-    self.Sjmp("JNE" , "_invalid_char") // JNE _invalid_char
-    self.Emit("MOVQ" , jit.Imm(_S_obj_sep), jit.Sib(_ST, _CX, 8, _ST_Vt))
-    self.Sjmp("JMP" , "_next") // JMP _next
-
-    /* arrays */
-    self.Link("_array_sep")
-    self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // MOVQ ST.Vp[CX], SI
-    self.Emit("MOVQ", jit.Ptr(_SI, 8), _SI) // MOVQ 8(SI), SI
-    self.Emit("MOVQ", jit.Ptr(_SI, 8), _DX) // MOVQ 8(SI), DX
-    self.Emit("CMPQ", _DX, jit.Ptr(_SI, 16)) // CMPQ DX, 16(SI)
-    self.Sjmp("JAE" , "_array_more") // JAE _array_more
-
-    /* add a slot for the new element */
-    self.Link("_array_append") // _array_append:
-    self.Emit("ADDQ", jit.Imm(1), jit.Ptr(_SI, 8)) // ADDQ $1, 8(SI)
-    self.Emit("MOVQ", jit.Ptr(_SI, 0), _SI) // MOVQ (SI), SI
-    self.Emit("ADDQ", jit.Imm(1), _CX) // ADDQ $1, CX
-    self.Emit("CMPQ", _CX, jit.Imm(types.MAX_RECURSE)) // CMPQ CX, ${types.MAX_RECURSE}
-    self.Sjmp("JAE" , "_stack_overflow")
-    self.Emit("SHLQ", jit.Imm(1), _DX) // SHLQ $1, DX
-    self.Emit("LEAQ", jit.Sib(_SI, _DX, 8, 0), _SI) // LEAQ (SI)(DX*8), SI
-    self.Emit("MOVQ", _CX, jit.Ptr(_ST, _ST_Sp)) // MOVQ CX, ST.Sp
-    self.WriteRecNotAX(7 , _SI, jit.Sib(_ST, _CX, 8, _ST_Vp), false) // MOVQ SI, ST.Vp[CX]
-    self.Emit("MOVQ", jit.Imm(_S_val), jit.Sib(_ST, _CX, 8, _ST_Vt)) // MOVQ _S_val, ST.Vt[CX}
-    self.Sjmp("JMP" , "_next") // JMP _next
-
-    /** V_ARRAY_END **/
-    self.Link("_decode_V_ARRAY_END") // _decode_V_ARRAY_END:
-    self.Emit("XORL", _DX, _DX) // XORL DX, DX
-    self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
-    self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
-    self.Emit("CMPQ", _AX, jit.Imm(_S_arr_0)) // CMPQ AX, _S_arr_0
-    self.Sjmp("JE" , "_first_item") // JE _first_item
-    self.Emit("CMPQ", _AX, jit.Imm(_S_arr)) // CMPQ AX, _S_arr
-    self.Sjmp("JNE" , "_invalid_char") // JNE _invalid_char
-    self.Emit("SUBQ", jit.Imm(1), jit.Ptr(_ST, _ST_Sp)) // SUBQ $1, ST.Sp
-    self.Emit("MOVQ", _DX, jit.Sib(_ST, _CX, 8, _ST_Vp)) // MOVQ DX, ST.Vp[CX]
-    self.Sjmp("JMP" , "_next") // JMP _next
-
-    /* first element of an array */
-    self.Link("_first_item") // _first_item:
-    self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
-    self.Emit("SUBQ", jit.Imm(2), jit.Ptr(_ST, _ST_Sp)) // SUBQ $2, ST.Sp
-    self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp - 8), _SI) // MOVQ ST.Vp[CX - 1], SI
-    self.Emit("MOVQ", jit.Ptr(_SI, 8), _SI) // MOVQ 8(SI), SI
-    self.Emit("MOVQ", _DX, jit.Sib(_ST, _CX, 8, _ST_Vp - 8)) // MOVQ DX, ST.Vp[CX - 1]
-    self.Emit("MOVQ", _DX, jit.Sib(_ST, _CX, 8, _ST_Vp)) // MOVQ DX, ST.Vp[CX]
-    self.Emit("MOVQ", _DX, jit.Ptr(_SI, 8)) // MOVQ DX, 8(SI)
-    self.Sjmp("JMP" , "_next") // JMP _next
-
-    /** V_OBJECT_END **/
-    self.Link("_decode_V_OBJECT_END") // _decode_V_OBJECT_END:
-    self.Emit("MOVL", jit.Imm(_S_omask_end), _DX) // MOVL _S_omask, DI
-    self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
-    self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
-    self.Emit("BTQ" , _AX, _DX)
-    self.Sjmp("JNC" , "_invalid_char") // JNE _invalid_char
-    self.Emit("XORL", _AX, _AX) // XORL AX, AX
-    self.Emit("SUBQ", jit.Imm(1), jit.Ptr(_ST, _ST_Sp)) // SUBQ $1, ST.Sp
-    self.Emit("MOVQ", _AX, jit.Sib(_ST, _CX, 8, _ST_Vp)) // MOVQ AX, ST.Vp[CX]
-    self.Sjmp("JMP" , "_next") // JMP _next
-
-    /* return from decoder */
-    self.Link("_return") // _return:
-    self.Emit("XORL", _EP, _EP) // XORL EP, EP
-    self.Emit("MOVQ", _EP, jit.Ptr(_ST, _ST_Vp)) // MOVQ EP, ST.Vp[0]
-    self.Link("_epilogue") // _epilogue:
-    self.Emit("SUBQ", jit.Imm(_FsmOffset), _ST) // SUBQ _FsmOffset, _ST
-    self.Emit("MOVQ", jit.Ptr(_SP, _VD_offs), _BP) // MOVQ _VD_offs(SP), BP
-    self.Emit("ADDQ", jit.Imm(_VD_size), _SP) // ADDQ $_VD_size, SP
-    self.Emit("RET") // RET
-
-    /* array expand */
-    self.Link("_array_more") // _array_more:
-    self.Emit("MOVQ" , _T_eface, _AX) // MOVQ _T_eface, AX
-    self.Emit("MOVOU", jit.Ptr(_SI, 0), _X0) // MOVOU (SI), X0
-    self.Emit("MOVQ" , jit.Ptr(_SI, 16), _DX) // MOVQ 16(SI), DX
-    self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
-    self.Emit("MOVOU", _X0, jit.Ptr(_SP, 8)) // MOVOU X0, 8(SP)
-    self.Emit("MOVQ" , _DX, jit.Ptr(_SP, 24)) // MOVQ DX, 24(SP)
-    self.Emit("SHLQ" , jit.Imm(1), _DX) // SHLQ $1, DX
-    self.Emit("MOVQ" , _DX, jit.Ptr(_SP, 32)) // MOVQ DX, 32(SP)
-    self.call_go(_F_growslice) // CALL_GO runtime.growslice
-    self.Emit("MOVQ" , jit.Ptr(_SP, 40), _DI) // MOVOU 40(SP), DI
-    self.Emit("MOVQ" , jit.Ptr(_SP, 48), _DX) // MOVOU 48(SP), DX
-    self.Emit("MOVQ" , jit.Ptr(_SP, 56), _AX) // MOVQ 56(SP), AX
-
-    /* update the slice */
-    self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
-    self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // MOVQ ST.Vp[CX], SI
-    self.Emit("MOVQ", jit.Ptr(_SI, 8), _SI) // MOVQ 8(SI), SI
-    self.Emit("MOVQ", _DX, jit.Ptr(_SI, 8)) // MOVQ DX, 8(SI)
-    self.Emit("MOVQ", _AX, jit.Ptr(_SI, 16)) // MOVQ AX, 16(AX)
-    self.WriteRecNotAX(8 , _DI, jit.Ptr(_SI, 0), false) // MOVQ R10, (SI)
-    self.Sjmp("JMP" , "_array_append") // JMP _array_append
-
-    /* copy string */
-    self.Link("copy_string") // pointer: R8, length: AX, return addr: DI
-    // self.Byte(0xcc)
-    self.Emit("MOVQ", _R8, _VAR_cs_p)
-    self.Emit("MOVQ", _AX, _VAR_cs_n)
-    self.Emit("MOVQ", _DI, _VAR_cs_LR)
-    self.Emit("MOVQ", _T_byte, jit.Ptr(_SP, 0))
-    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8))
-    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16))
-    self.call_go(_F_makeslice)
-    self.Emit("MOVQ", jit.Ptr(_SP, 24), _R8)
-    self.Emit("MOVQ", _R8, _VAR_cs_d)
-    self.Emit("MOVQ", _R8, jit.Ptr(_SP, 0))
-    self.Emit("MOVQ", _VAR_cs_p, _R8)
-    self.Emit("MOVQ", _R8, jit.Ptr(_SP, 8))
-    self.Emit("MOVQ", _VAR_cs_n, _AX)
-    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16))
-    self.call_go(_F_memmove)
-    self.Emit("MOVQ", _VAR_cs_d, _R8)
-    self.Emit("MOVQ", _VAR_cs_n, _AX)
-    self.Emit("MOVQ", _VAR_cs_LR, _DI)
-    // self.Byte(0xcc)
-    self.Rjmp("JMP", _DI)
-
-    /* error handlers */
-    self.Link("_stack_overflow")
-    self.Emit("MOVL" , _E_recurse, _EP) // MOVQ _E_recurse, EP
-    self.Sjmp("JMP" , "_error") // JMP _error
-    self.Link("_vtype_error") // _vtype_error:
-    self.Emit("MOVQ" , _DI, _IC) // MOVQ DI, IC
-    self.Emit("MOVL" , _E_invalid, _EP) // MOVL _E_invalid, EP
-    self.Sjmp("JMP" , "_error") // JMP _error
-    self.Link("_invalid_char") // _invalid_char:
-    self.Emit("SUBQ" , jit.Imm(1), _IC) // SUBQ $1, IC
-    self.Emit("MOVL" , _E_invalid, _EP) // MOVL _E_invalid, EP
-    self.Sjmp("JMP" , "_error") // JMP _error
-    self.Link("_unquote_error") // _unquote_error:
-    self.Emit("MOVQ" , _VAR_ss_Iv, _IC) // MOVQ ss.Iv, IC
-    self.Emit("SUBQ" , jit.Imm(1), _IC) // SUBQ $1, IC
-    self.Link("_parsing_error") // _parsing_error:
-    self.Emit("NEGQ" , _AX) // NEGQ AX
-    self.Emit("MOVQ" , _AX, _EP) // MOVQ AX, EP
-    self.Link("_error") // _error:
-    self.Emit("PXOR" , _X0, _X0) // PXOR X0, X0
-    self.Emit("MOVOU", _X0, jit.Ptr(_VP, 0)) // MOVOU X0, (VP)
-    self.Sjmp("JMP" , "_epilogue") // JMP _epilogue
-
-    /* invalid value type, never returns */
-    self.Link("_invalid_vtype")
-    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
-    self.call(_F_invalid_vtype) // CALL invalid_type
-    self.Emit("UD2") // UD2
-
-    /* switch jump table */
-    self.Link("_switch_table") // _switch_table:
-    self.Sref("_decode_V_EOF", 0) // SREF &_decode_V_EOF, $0
-    self.Sref("_decode_V_NULL", -4) // SREF &_decode_V_NULL, $-4
-    self.Sref("_decode_V_TRUE", -8) // SREF &_decode_V_TRUE, $-8
-    self.Sref("_decode_V_FALSE", -12) // SREF &_decode_V_FALSE, $-12
-    self.Sref("_decode_V_ARRAY", -16) // SREF &_decode_V_ARRAY, $-16
-    self.Sref("_decode_V_OBJECT", -20) // SREF &_decode_V_OBJECT, $-20
-    self.Sref("_decode_V_STRING", -24) // SREF &_decode_V_STRING, $-24
-    self.Sref("_decode_V_DOUBLE", -28) // SREF &_decode_V_DOUBLE, $-28
-    self.Sref("_decode_V_INTEGER", -32) // SREF &_decode_V_INTEGER, $-32
-    self.Sref("_decode_V_KEY_SEP", -36) // SREF &_decode_V_KEY_SEP, $-36
-    self.Sref("_decode_V_ELEM_SEP", -40) // SREF &_decode_V_ELEM_SEP, $-40
-    self.Sref("_decode_V_ARRAY_END", -44) // SREF &_decode_V_ARRAY_END, $-44
-    self.Sref("_decode_V_OBJECT_END", -48) // SREF &_decode_V_OBJECT_END, $-48
-
-    /* fast character lookup table */
-    self.Link("_decode_tab") // _decode_tab:
-    self.Sref("_decode_V_EOF", 0) // SREF &_decode_V_EOF, $0
-
-    /* generate rest of the tabs */
-    for i := 1; i < 256; i++ {
-        if to, ok := _R_tab[i]; ok {
-            self.Sref(to, -int64(i) * 4)
-        } else {
-            self.Byte(0x00, 0x00, 0x00, 0x00)
-        }
-    }
-}
-
-func (self *_ValueDecoder) WritePtrAX(i int, rec obj.Addr, saveDI bool) {
-    self.Emit("MOVQ", _V_writeBarrier, _R10)
-    self.Emit("CMPL", jit.Ptr(_R10, 0), jit.Imm(0))
-    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
-    if saveDI {
-        self.save(_DI)
-    }
-    self.Emit("LEAQ", rec, _DI)
-    self.Emit("MOVQ", _F_gcWriteBarrierAX, _R10) // MOVQ ${fn}, AX
-    self.Rjmp("CALL", _R10)
-    if saveDI {
-        self.load(_DI)
-    }
-    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
-    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
-    self.Emit("MOVQ", _AX, rec)
-    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
-}
-
-func (self *_ValueDecoder) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr, saveDI bool) {
-    if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX {
-        panic("rec contains AX!")
-    }
-    self.Emit("MOVQ", _V_writeBarrier, _R10)
-    self.Emit("CMPL", jit.Ptr(_R10, 0), jit.Imm(0))
-    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
-    self.Emit("MOVQ", ptr, _AX)
-    if saveDI {
-        self.save(_DI)
-    }
-    self.Emit("LEAQ", rec, _DI)
-    self.Emit("MOVQ", _F_gcWriteBarrierAX, _R10) // MOVQ ${fn}, AX
-    self.Rjmp("CALL", _R10)
-    if saveDI {
-        self.load(_DI)
-    }
-    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
-    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
-    self.Emit("MOVQ", ptr, rec)
-    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
-}
-
-/** Generic Decoder **/
-
-var (
-    _subr_decode_value = new(_ValueDecoder).build()
-)
-
-//go:nosplit
-func invalid_vtype(vt types.ValueType) {
-    throw(fmt.Sprintf("invalid value type: %d", vt))
-}
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/asm.s b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/asm.s
similarity index 100%
rename from vendor/github.com/bytedance/sonic/internal/decoder/asm.s
rename to vendor/github.com/bytedance/sonic/internal/decoder/jitdec/asm.s
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/asm_stubs_amd64_go117.go b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/asm_stubs_amd64_go117.go
new file mode 100644
index 0000000000000000000000000000000000000000..48f73e5bf29b4cc3a7c2d7026658d449326565f7
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/asm_stubs_amd64_go117.go
@@ -0,0 +1,126 @@
+// +build go1.17,!go1.21
+
+// Copyright 2023 CloudWeGo Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+ +package jitdec + +import ( + `strconv` + `unsafe` + + `github.com/bytedance/sonic/internal/jit` + `github.com/twitchyliquid64/golang-asm/obj` + `github.com/twitchyliquid64/golang-asm/obj/x86` +) + +//go:linkname _runtime_writeBarrier runtime.writeBarrier +var _runtime_writeBarrier uintptr + +//go:linkname gcWriteBarrierAX runtime.gcWriteBarrier +func gcWriteBarrierAX() + +var ( + _V_writeBarrier = jit.Imm(int64(uintptr(unsafe.Pointer(&_runtime_writeBarrier)))) + + _F_gcWriteBarrierAX = jit.Func(gcWriteBarrierAX) +) + +func (self *_Assembler) WritePtrAX(i int, rec obj.Addr, saveDI bool) { + self.Emit("MOVQ", _V_writeBarrier, _R9) + self.Emit("CMPL", jit.Ptr(_R9, 0), jit.Imm(0)) + self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}") + if saveDI { + self.save(_DI) + } + self.Emit("LEAQ", rec, _DI) + self.call(_F_gcWriteBarrierAX) + if saveDI { + self.load(_DI) + } + self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}") + self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}") + self.Emit("MOVQ", _AX, rec) + self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}") +} + +func (self *_Assembler) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr, saveDI bool, saveAX bool) { + if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX { + panic("rec contains AX!") + } + self.Emit("MOVQ", _V_writeBarrier, _R9) + self.Emit("CMPL", jit.Ptr(_R9, 0), jit.Imm(0)) + self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}") + if saveAX { + self.Emit("XCHGQ", ptr, _AX) + } else { + self.Emit("MOVQ", ptr, _AX) + } + if saveDI { + self.save(_DI) + } + self.Emit("LEAQ", rec, _DI) + self.call(_F_gcWriteBarrierAX) + if saveDI { + self.load(_DI) + } + if saveAX { + self.Emit("XCHGQ", ptr, _AX) + } + self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}") + self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}") + self.Emit("MOVQ", ptr, rec) + self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}") +} + + +func (self *_ValueDecoder) WritePtrAX(i int, rec obj.Addr, saveDI bool) { + self.Emit("MOVQ", _V_writeBarrier, _R9) + self.Emit("CMPL", jit.Ptr(_R9, 0), jit.Imm(0)) + self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}") + if saveDI { + self.save(_DI) + } + self.Emit("LEAQ", rec, _DI) + self.call(_F_gcWriteBarrierAX) + if saveDI { + self.load(_DI) + } + self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}") + self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}") + self.Emit("MOVQ", _AX, rec) + self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}") +} + +func (self *_ValueDecoder) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr, saveDI bool) { + if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX { + panic("rec contains AX!") + } + self.Emit("MOVQ", _V_writeBarrier, _AX) + self.Emit("CMPL", jit.Ptr(_AX, 0), jit.Imm(0)) + self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}") + self.Emit("MOVQ", ptr, _AX) + if saveDI { + self.save(_DI) + } + self.Emit("LEAQ", rec, _DI) + self.call(_F_gcWriteBarrierAX) + if saveDI { + self.load(_DI) + } + self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}") + self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}") + self.Emit("MOVQ", ptr, rec) + self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}") +} diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/asm_stubs_amd64_go121.go b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/asm_stubs_amd64_go121.go new file mode 100644 index 0000000000000000000000000000000000000000..cbec3d248474a0076cc159de0ad78def16c9761d --- 
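Note on the file above: all four stubs emit the same guarded-store pattern, test the runtime's global write-barrier flag and only call runtime.gcWriteBarrier (value in AX, destination slot in DI) when the GC is active; otherwise fall through to a plain MOVQ. A minimal plain-Go sketch of that fast/slow path, where writeBarrierEnabled and gcWrite are illustrative stand-ins for the runtime symbols, not real APIs:

    package main

    import (
        "fmt"
        "unsafe"
    )

    // Stand-ins for runtime.writeBarrier and runtime.gcWriteBarrier; the
    // stubs above emit the equivalent test and call inline as machine code.
    var writeBarrierEnabled bool

    func gcWrite(slot *unsafe.Pointer, val unsafe.Pointer) {
        // the real barrier also shades the old and new pointers for the GC
        *slot = val
    }

    // storeWithBarrier mirrors the fast/slow path of WritePtrAX: test the
    // flag, and only take the barrier call when the GC actually needs it.
    func storeWithBarrier(slot *unsafe.Pointer, val unsafe.Pointer) {
        if writeBarrierEnabled { // CMPL (runtime.writeBarrier), $0
            gcWrite(slot, val)   // CALL runtime.gcWriteBarrier
            return
        }
        *slot = val // plain MOVQ on the fast path
    }

    func main() {
        var p unsafe.Pointer
        x := 42
        storeWithBarrier(&p, unsafe.Pointer(&x))
        fmt.Println(p != nil) // true
    }

The JE over the CALL is what keeps the common no-GC case down to a single compare and store.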
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/asm_stubs_amd64_go121.go b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/asm_stubs_amd64_go121.go
new file mode 100644
index 0000000000000000000000000000000000000000..cbec3d248474a0076cc159de0ad78def16c9761d
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/asm_stubs_amd64_go121.go
@@ -0,0 +1,132 @@
+// +build go1.21,!go1.24
+
+// Copyright 2023 CloudWeGo Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package jitdec
+
+import (
+    `strconv`
+    `unsafe`
+
+    `github.com/bytedance/sonic/internal/jit`
+    `github.com/twitchyliquid64/golang-asm/obj`
+    `github.com/twitchyliquid64/golang-asm/obj/x86`
+)
+
+//go:linkname _runtime_writeBarrier runtime.writeBarrier
+var _runtime_writeBarrier uintptr
+
+//go:nosplit
+//go:linkname gcWriteBarrier2 runtime.gcWriteBarrier2
+func gcWriteBarrier2()
+
+// Notice: gcWriteBarrier must use R11 register!!
+var _R11 = _IC
+
+var (
+    _V_writeBarrier = jit.Imm(int64(uintptr(unsafe.Pointer(&_runtime_writeBarrier))))
+
+    _F_gcWriteBarrier2 = jit.Func(gcWriteBarrier2)
+)
+
+func (self *_Assembler) WritePtrAX(i int, rec obj.Addr, saveDI bool) {
+    self.Emit("MOVQ", _V_writeBarrier, _R9)
+    self.Emit("CMPL", jit.Ptr(_R9, 0), jit.Imm(0))
+    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
+    if saveDI {
+        self.save(_DI, _R11)
+    } else {
+        self.save(_R11)
+    }
+    self.Emit("MOVQ", _F_gcWriteBarrier2, _R11)
+    self.Rjmp("CALL", _R11)
+    self.Emit("MOVQ", _AX, jit.Ptr(_R11, 0))
+    self.Emit("MOVQ", rec, _DI)
+    self.Emit("MOVQ", _DI, jit.Ptr(_R11, 8))
+    if saveDI {
+        self.load(_DI, _R11)
+    } else {
+        self.load(_R11)
+    }
+    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
+    self.Emit("MOVQ", _AX, rec)
+}
+
+func (self *_Assembler) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr, saveDI bool, saveAX bool) {
+    if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX {
+        panic("rec contains AX!")
+    }
+    self.Emit("MOVQ", _V_writeBarrier, _R9)
+    self.Emit("CMPL", jit.Ptr(_R9, 0), jit.Imm(0))
+    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
+    if saveAX {
+        self.save(_AX, _R11)
+    } else {
+        self.save(_R11)
+    }
+    self.Emit("MOVQ", _F_gcWriteBarrier2, _R11)
+    self.Rjmp("CALL", _R11)
+    self.Emit("MOVQ", ptr, jit.Ptr(_R11, 0))
+    self.Emit("MOVQ", rec, _AX)
+    self.Emit("MOVQ", _AX, jit.Ptr(_R11, 8))
+    if saveAX {
+        self.load(_AX, _R11)
+    } else {
+        self.load(_R11)
+    }
+    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
+    self.Emit("MOVQ", ptr, rec)
+}
+
+func (self *_ValueDecoder) WritePtrAX(i int, rec obj.Addr, saveDI bool) {
+    self.Emit("MOVQ", _V_writeBarrier, _R9)
+    self.Emit("CMPL", jit.Ptr(_R9, 0), jit.Imm(0))
+    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
+    if saveDI {
+        self.save(_DI, _R11)
+    } else {
+        self.save(_R11)
+    }
+    self.Emit("MOVQ", _F_gcWriteBarrier2, _R11)
+    self.Rjmp("CALL", _R11)
+    self.Emit("MOVQ", _AX, jit.Ptr(_R11, 0))
+    self.Emit("MOVQ", rec, _DI)
+    self.Emit("MOVQ", _DI, jit.Ptr(_R11, 8))
+    if saveDI {
+        self.load(_DI, _R11)
+    } else {
+        self.load(_R11)
+    }
+    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
+    self.Emit("MOVQ", _AX, rec)
+}
+
+func (self *_ValueDecoder) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr, saveDI bool) {
+    if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX {
+        panic("rec contains AX!")
+    }
+    self.Emit("MOVQ", _V_writeBarrier, _AX)
+    self.Emit("CMPL", jit.Ptr(_AX, 0), jit.Imm(0))
+    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
+    self.save(_R11)
+    self.Emit("MOVQ", _F_gcWriteBarrier2, _R11)
+    self.Rjmp("CALL", _R11)
+    self.Emit("MOVQ", ptr, jit.Ptr(_R11, 0))
+    self.Emit("MOVQ", rec, _AX)
+    self.Emit("MOVQ", _AX, jit.Ptr(_R11, 8))
+    self.load(_R11)
+    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
+    self.Emit("MOVQ", ptr, rec)
+}
\ No newline at end of file
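On go1.21+ the runtime replaces the single-pointer barrier with runtime.gcWriteBarrier2, which hands back a two-slot buffer (in R11, hence the _R11 alias above): the caller records the new value at offset 0 and the old slot content at offset 8, then performs the store itself. A hedged Go rendering of that protocol, with barrierBuf and the function body as stand-ins rather than the real runtime mechanics:

    package main

    import (
        "fmt"
        "unsafe"
    )

    // Stand-in for runtime.gcWriteBarrier2: it returns a two-slot buffer
    // that the caller fills before doing the store itself.
    var barrierBuf [2]unsafe.Pointer

    func gcWriteBarrier2() *[2]unsafe.Pointer { return &barrierBuf }

    func storeWithBarrier2(slot *unsafe.Pointer, val unsafe.Pointer) {
        buf := gcWriteBarrier2() // CALL via R11; buffer comes back in R11
        buf[0] = val             // MOVQ AX, (R11): the new value
        buf[1] = *slot           // MOVQ rec, 8(R11): the old slot content
        *slot = val              // the store proper, after the bookkeeping
    }

    func main() {
        var p unsafe.Pointer
        x := 7
        storeWithBarrier2(&p, unsafe.Pointer(&x))
        fmt.Println(p != nil) // true
    }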
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/assembler_amd64_go117.go b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/assembler_regabi_amd64.go
similarity index 95%
rename from vendor/github.com/bytedance/sonic/internal/decoder/assembler_amd64_go117.go
rename to vendor/github.com/bytedance/sonic/internal/decoder/jitdec/assembler_regabi_amd64.go
index 27413739de9ee216ed6c33991febe1737dbc6ad8..4ff3b1962eca165583188474275b0523e49616de 100644
--- a/vendor/github.com/bytedance/sonic/internal/decoder/assembler_amd64_go117.go
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/assembler_regabi_amd64.go
@@ -1,5 +1,5 @@
-//go:build go1.17 && !go1.21
-// +build go1.17,!go1.21
+//go:build go1.17 && !go1.24
+// +build go1.17,!go1.24
 
 /*
  * Copyright 2021 ByteDance Inc.
@@ -17,23 +17,22 @@
  * limitations under the License.
  */
 
-package decoder
+package jitdec
 
 import (
-    `encoding/json`
-    `fmt`
-    `math`
-    `reflect`
-    `strconv`
-    `unsafe`
-
-    `github.com/bytedance/sonic/internal/caching`
-    `github.com/bytedance/sonic/internal/jit`
-    `github.com/bytedance/sonic/internal/native`
-    `github.com/bytedance/sonic/internal/native/types`
-    `github.com/bytedance/sonic/internal/rt`
-    `github.com/twitchyliquid64/golang-asm/obj`
-    `github.com/twitchyliquid64/golang-asm/obj/x86`
+    "encoding/json"
+    "fmt"
+    "math"
+    "reflect"
+    "strings"
+    "unsafe"
+
+    "github.com/bytedance/sonic/internal/caching"
+    "github.com/bytedance/sonic/internal/jit"
+    "github.com/bytedance/sonic/internal/native"
+    "github.com/bytedance/sonic/internal/native/types"
+    "github.com/bytedance/sonic/internal/rt"
+    "github.com/twitchyliquid64/golang-asm/obj"
 )
 
 /** Register Allocations
@@ -137,6 +136,7 @@ var (
     _R9  = jit.Reg("R9")
     _X0  = jit.Reg("X0")
     _X1  = jit.Reg("X1")
+    _X15 = jit.Reg("X15")
 )
 
 var (
@@ -294,7 +294,6 @@ var _OpFuncTab = [256]func(*_Assembler, *_Instr) {
     _OP_array_clear_p : (*_Assembler)._asm_OP_array_clear_p,
     _OP_slice_init    : (*_Assembler)._asm_OP_slice_init,
     _OP_slice_append  : (*_Assembler)._asm_OP_slice_append,
-    _OP_object_skip   : (*_Assembler)._asm_OP_object_skip,
     _OP_object_next   : (*_Assembler)._asm_OP_object_next,
     _OP_struct_field  : (*_Assembler)._asm_OP_struct_field,
     _OP_unmarshal     : (*_Assembler)._asm_OP_unmarshal,
@@ -314,6 +313,7 @@ var _OpFuncTab = [256]func(*_Assembler, *_Instr) {
     _OP_check_char_0  : (*_Assembler)._asm_OP_check_char_0,
    _OP_dismatch_err  : (*_Assembler)._asm_OP_dismatch_err,
     _OP_go_skip       : (*_Assembler)._asm_OP_go_skip,
+    _OP_skip_emtpy    : (*_Assembler)._asm_OP_skip_empty,
     _OP_add           : (*_Assembler)._asm_OP_add,
     _OP_check_empty   : (*_Assembler)._asm_OP_check_empty,
     _OP_debug         : (*_Assembler)._asm_OP_debug,
@@ -387,7 +387,7 @@ func (self *_Assembler) prologue() {
 
 var (
     _REG_go = []obj.Addr { _ST, _VP, _IP, _IL, _IC }
-    _REG_rt = []obj.Addr { _ST, _VP, _IP, _IL, _IC, _IL }
+    _REG_rt = []obj.Addr { _ST, _VP, _IP, _IL, _IC }
 )
 
 func (self *_Assembler) save(r ...obj.Addr) {
@@ -422,9 +422,10 @@ func (self *_Assembler) call_go(fn obj.Addr) {
 }
 
 func (self *_Assembler) callc(fn obj.Addr) {
-    self.Emit("XCHGQ", _IP, _BP)
+    self.save(_IP)
     self.call(fn)
-    self.Emit("XCHGQ", _IP, _BP)
+    self.Emit("XORPS", _X15, _X15)
+    self.load(_IP)
 }
 
 func (self *_Assembler) call_c(fn obj.Addr) {
@@ -493,9 +494,9 @@ func (self *_Assembler) type_error() {
 func (self *_Assembler) mismatch_error() {
     self.Link(_LB_mismatch_error)                     // _type_error:
     self.Emit("MOVQ", _VAR_et, _ET)                   // MOVQ _VAR_et, ET
-    self.Emit("MOVQ", _VAR_ic, _EP)                   // MOVQ _VAR_ic, EP
     self.Emit("MOVQ", _I_json_MismatchTypeError, _CX) // MOVQ _I_json_MismatchType, CX
     self.Emit("CMPQ", _ET, _CX)                       // CMPQ ET, CX
+    self.Emit("MOVQ", jit.Ptr(_ST, _EpOffset), _EP)   // MOVQ stack.Ep, EP
     self.Sjmp("JE" , _LB_error)                       // JE _LB_error
     self.Emit("MOVQ", _ARG_sp, _AX)
     self.Emit("MOVQ", _ARG_sl, _BX)
@@ -601,6 +602,28 @@ func (self *_Assembler) _asm_OP_go_skip(p *_Instr) {
     self.Sjmp("JMP" , _LB_skip_one)                   // JMP _skip_one
 }
 
+var _F_IndexByte = jit.Func(strings.IndexByte)
+
+func (self *_Assembler) _asm_OP_skip_empty(p *_Instr) {
+    // self.Byte(0xcc)
+    self.call_sf(_F_skip_one)                         // CALL_SF skip_one
+    // self.Byte(0xcc)
+    self.Emit("TESTQ", _AX, _AX)                      // TESTQ AX, AX
+    self.Sjmp("JS"   , _LB_parsing_error_v)           // JS _parse_error_v
+    self.Emit("BTQ", jit.Imm(_F_disable_unknown), _ARG_fv)
+    self.Xjmp("JNC", p.vi())
+    self.Emit("LEAQ", jit.Sib(_IC, _AX, 1, 0), _BX)
+    self.Emit("MOVQ", _BX, _ARG_sv_n)
+    self.Emit("LEAQ", jit.Sib(_IP, _AX, 1, 0), _AX)
+    self.Emit("MOVQ", _AX, _ARG_sv_p)
+    self.Emit("MOVQ", jit.Imm(':'), _CX)
+    self.call_go(_F_IndexByte)
+    // self.Byte(0xcc)
+    self.Emit("TESTQ", _AX, _AX)
+    // disallow unknown field
+    self.Sjmp("JNS", _LB_field_error)
+}
+
 func (self *_Assembler) skip_one() {
     self.Link(_LB_skip_one)                           // _skip:
     self.Emit("MOVQ", _VAR_ic, _IC)                   // MOVQ _VAR_ic, IC
@@ -608,7 +631,6 @@ func (self *_Assembler) skip_one() {
     self.Emit("TESTQ", _AX, _AX)                      // TESTQ AX, AX
     self.Sjmp("JS"   , _LB_parsing_error_v)           // JS _parse_error_v
     self.Emit("MOVQ" , _VAR_pc, _R9)                  // MOVQ pc, R9
-    // self.Byte(0xcc)
     self.Rjmp("JMP"  , _R9)                           // JMP (R9)
 }
 
@@ -825,8 +847,8 @@ var (
 )
 
 var (
-    _Vp_max_f32 = new(float64)
-    _Vp_min_f32 = new(float64)
+    _Vp_max_f32 = new(float32)
+    _Vp_min_f32 = new(float32)
 )
 
 func init() {
@@ -835,17 +857,15 @@ func init() {
 }
 
 func (self *_Assembler) range_single_X0() {
-    self.Emit("MOVSD"   , _VAR_st_Dv, _X0)            // MOVSD st.Dv, X0
+    self.Emit("CVTSD2SS", _VAR_st_Dv, _X0)            // CVTSD2SS _VAR_st_Dv, X0
     self.Emit("MOVQ"    , _V_max_f32, _CX)            // MOVQ _max_f32, CX
     self.Emit("MOVQ"    , jit.Gitab(_I_float32), _ET) // MOVQ ${itab(float32)}, ET
     self.Emit("MOVQ"    , jit.Gtype(_T_float32), _EP) // MOVQ ${type(float32)}, EP
-    self.Emit("UCOMISD" , jit.Ptr(_CX, 0), _X0)       // UCOMISD (CX), X0
+    self.Emit("UCOMISS" , jit.Ptr(_CX, 0), _X0)       // UCOMISS (CX), X0
     self.Sjmp("JA"      , _LB_range_error)            // JA _range_error
     self.Emit("MOVQ"    , _V_min_f32, _CX)            // MOVQ _min_f32, CX
-    self.Emit("MOVSD"   , jit.Ptr(_CX, 0), _X1)       // MOVSD (CX), X1
-    self.Emit("UCOMISD" , _X0, _X1)                   // UCOMISD X0, X1
-    self.Sjmp("JA"      , _LB_range_error)            // JA _range_error
-    self.Emit("CVTSD2SS", _X0, _X0)                   // CVTSD2SS X0, X0
+    self.Emit("UCOMISS" , jit.Ptr(_CX, 0), _X0)       // UCOMISS (CX), X0
+    self.Sjmp("JB"      , _LB_range_error)            // JB _range_error
 }
 
 func (self *_Assembler) range_signed_CX(i *rt.GoItab, t *rt.GoType, a int64, b int64) {
@@ -976,11 +996,13 @@ var (
 
 var (
     _F_decodeJsonUnmarshaler obj.Addr
+    _F_decodeJsonUnmarshalerQuoted obj.Addr
     _F_decodeTextUnmarshaler obj.Addr
 )
 
 func init() {
     _F_decodeJsonUnmarshaler = jit.Func(decodeJsonUnmarshaler)
+    _F_decodeJsonUnmarshalerQuoted = jit.Func(decodeJsonUnmarshalerQuoted)
     _F_decodeTextUnmarshaler = jit.Func(decodeTextUnmarshaler)
 }
 
@@ -1061,18 +1083,18 @@ func (self *_Assembler) mapassign_utext(t reflect.Type, addressable bool) {
 var (
     _F_skip_one    = jit.Imm(int64(native.S_skip_one))
     _F_skip_array  = jit.Imm(int64(native.S_skip_array))
-    _F_skip_object = jit.Imm(int64(native.S_skip_object))
     _F_skip_number = jit.Imm(int64(native.S_skip_number))
 )
 
-func (self *_Assembler) unmarshal_json(t reflect.Type, deref bool) {
+func (self *_Assembler) unmarshal_json(t reflect.Type, deref bool, f obj.Addr) {
     self.call_sf(_F_skip_one)                         // CALL_SF skip_one
     self.Emit("TESTQ", _AX, _AX)                      // TESTQ AX, AX
     self.Sjmp("JS"   , _LB_parsing_error_v)           // JS _parse_error_v
+    self.Emit("MOVQ", _IC, _VAR_ic)                   // store for mismatche error skip
     self.slice_from_r(_AX, 0)                         // SLICE_R AX, $0
     self.Emit("MOVQ" , _DI, _ARG_sv_p)                // MOVQ DI, sv.p
     self.Emit("MOVQ" , _SI, _ARG_sv_n)                // MOVQ SI, sv.n
-    self.unmarshal_func(t, _F_decodeJsonUnmarshaler, deref)   // UNMARSHAL json, ${t}, ${deref}
+    self.unmarshal_func(t, f, deref)                  // UNMARSHAL json, ${t}, ${deref}
 }
 
 func (self *_Assembler) unmarshal_text(t reflect.Type, deref bool) {
@@ -1107,7 +1129,15 @@ func (self *_Assembler) unmarshal_func(t reflect.Type, fn obj.Addr, deref bool)
     self.Emit("MOVQ" , _ARG_sv_n, _DI)                // MOVQ sv.n, DI
     self.call_go(fn)                                  // CALL_GO ${fn}
     self.Emit("TESTQ", _ET, _ET)                      // TESTQ ET, ET
-    self.Sjmp("JNZ"  , _LB_error)                     // JNZ _error
+    self.Sjmp("JZ"   , "_unmarshal_func_end_{n}")     // JNZ _error
+    self.Emit("MOVQ", _I_json_MismatchTypeError, _CX) // MOVQ ET, VAR.et
+    self.Emit("CMPQ", _ET, _CX)                       // check if MismatchedError
+    self.Sjmp("JNE" , _LB_error)
+    self.Emit("MOVQ", jit.Type(t), _CX)               // store current type
+    self.Emit("MOVQ", _CX, _VAR_et)                   // store current type
+    self.Emit("MOVQ", _VAR_ic, _IC)                   // recover the pos
+    self.Emit("XORL", _ET, _ET)
+    self.Link("_unmarshal_func_end_{n}")
 }
 
 /** Dynamic Decoding Routine **/
@@ -1140,8 +1170,8 @@ func (self *_Assembler) decode_dynamic(vt obj.Addr, vp obj.Addr) {
     self.Emit("MOVQ", _I_json_MismatchTypeError, _CX) // MOVQ _I_json_MismatchTypeError, CX
     self.Emit("CMPQ", _ET, _CX)                       // CMPQ ET, CX
     self.Sjmp("JNE", _LB_error)                       // JNE LB_error
-    self.Emit("MOVQ", _EP, _VAR_ic)                   // MOVQ EP, VAR_ic
     self.Emit("MOVQ", _ET, _VAR_et)                   // MOVQ ET, VAR_et
+    self.WriteRecNotAX(14, _EP, jit.Ptr(_ST, _EpOffset), false, false)  // MOVQ EP, stack.Ep
     self.Link("_decode_dynamic_end_{n}")
 }
 
@@ -1150,7 +1180,7 @@ func (self *_Assembler) decode_dynamic(vt obj.Addr, vp obj.Addr) {
 var (
     _F_memequal         = jit.Func(memequal)
     _F_memmove          = jit.Func(memmove)
-    _F_growslice        = jit.Func(growslice)
+    _F_growslice        = jit.Func(rt.GrowSlice)
     _F_makeslice        = jit.Func(makeslice)
     _F_makemap_small    = jit.Func(makemap_small)
     _F_mapassign_fast64 = jit.Func(mapassign_fast64)
@@ -1168,7 +1198,7 @@ var (
 
 var (
     _F_FieldMap_GetCaseInsensitive obj.Addr
-    _Empty_Slice = make([]byte, 0)
+    _Empty_Slice = []byte{}
     _Zero_Base = int64(uintptr(((*rt.GoSlice)(unsafe.Pointer(&_Empty_Slice))).Ptr))
 )
 
@@ -1645,7 +1675,8 @@ func (self *_Assembler) _asm_OP_check_empty(p *_Instr) {
     self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(rbracket)))    // CMPB (IP)(IC), ']'
     self.Sjmp("JNE" , "_not_empty_array_{n}")         // JNE _not_empty_array_{n}
     self.Emit("MOVQ", _AX, _IC)                       // MOVQ AX, IC
-    self.StorePtr(_Zero_Base, jit.Ptr(_VP, 0), _AX)   // MOVQ $zerobase, (VP)
+    self.Emit("MOVQ", jit.Imm(_Zero_Base), _AX)
+    self.WritePtrAX(9, jit.Ptr(_VP, 0), false)
     self.Emit("PXOR", _X0, _X0)                       // PXOR X0, X0
     self.Emit("MOVOU", _X0, jit.Ptr(_VP, 8))          // MOVOU X0, 8(VP)
     self.Xjmp("JMP" , p.vi())                         // JMP {p.vi()}
@@ -1701,12 +1732,6 @@ func (self *_Assembler) _asm_OP_slice_append(p *_Instr) {
     self.Link("_append_slice_end_{n}")
 }
 
-func (self *_Assembler) _asm_OP_object_skip(_ *_Instr) {
-    self.call_sf(_F_skip_object)                      // CALL_SF skip_object
-    self.Emit("TESTQ", _AX, _AX)                      // TESTQ AX, AX
-    self.Sjmp("JS"   , _LB_parsing_error_v)           // JS _parse_error_v
-}
-
 func (self *_Assembler) _asm_OP_object_next(_ *_Instr) {
     self.call_sf(_F_skip_one)                         // CALL_SF skip_one
     self.Emit("TESTQ", _AX, _AX)                      // TESTQ AX, AX
@@ -1777,11 +1802,19 @@ func (self *_Assembler) _asm_OP_struct_field(p *_Instr) {
 }
 
 func (self *_Assembler) _asm_OP_unmarshal(p *_Instr) {
-    self.unmarshal_json(p.vt(), true)
+    if iv := p.i64(); iv != 0 {
+        self.unmarshal_json(p.vt(), true, _F_decodeJsonUnmarshalerQuoted)
+    } else {
+        self.unmarshal_json(p.vt(), true, _F_decodeJsonUnmarshaler)
+    }
 }
 
 func (self *_Assembler) _asm_OP_unmarshal_p(p *_Instr) {
-    self.unmarshal_json(p.vt(), false)
+    if iv := p.i64(); iv != 0 {
+        self.unmarshal_json(p.vt(), false, _F_decodeJsonUnmarshalerQuoted)
+    } else {
+        self.unmarshal_json(p.vt(), false, _F_decodeJsonUnmarshaler)
+    }
 }
 
 func (self *_Assembler) _asm_OP_unmarshal_text(p *_Instr) {
@@ -1931,62 +1964,3 @@ func (self *_Assembler) print_gc(i int, p1 *_Instr, p2 *_Instr) {
     self.Emit("MOVQ", jit.Imm(int64(i)), _AX)         // MOVQ $(i), (SP)
     self.call_go(_F_println)
 }
-
-//go:linkname _runtime_writeBarrier runtime.writeBarrier
-var _runtime_writeBarrier uintptr
-
-//go:linkname gcWriteBarrierAX runtime.gcWriteBarrier
-func gcWriteBarrierAX()
-
-var (
-    _V_writeBarrier = jit.Imm(int64(uintptr(unsafe.Pointer(&_runtime_writeBarrier))))
-
-    _F_gcWriteBarrierAX = jit.Func(gcWriteBarrierAX)
-)
-
-func (self *_Assembler) WritePtrAX(i int, rec obj.Addr, saveDI bool) {
-    self.Emit("MOVQ", _V_writeBarrier, _R9)
-    self.Emit("CMPL", jit.Ptr(_R9, 0), jit.Imm(0))
-    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
-    if saveDI {
-        self.save(_DI)
-    }
-    self.Emit("LEAQ", rec, _DI)
-    self.call(_F_gcWriteBarrierAX)
-    if saveDI {
-        self.load(_DI)
-    }
-    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
-    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
-    self.Emit("MOVQ", _AX, rec)
-    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
-}
-
-func (self *_Assembler) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr, saveDI bool, saveAX bool) {
-    if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX {
-        panic("rec contains AX!")
-    }
-    self.Emit("MOVQ", _V_writeBarrier, _R9)
-    self.Emit("CMPL", jit.Ptr(_R9, 0), jit.Imm(0))
-    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
-    if saveAX {
-        self.Emit("XCHGQ", ptr, _AX)
-    } else {
-        self.Emit("MOVQ", ptr, _AX)
-    }
-    if saveDI {
-        self.save(_DI)
-    }
-    self.Emit("LEAQ", rec, _DI)
-    self.call(_F_gcWriteBarrierAX)
-    if saveDI {
-        self.load(_DI)
-    }
-    if saveAX {
-        self.Emit("XCHGQ", ptr, _AX)
-    }
-    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
-    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
-    self.Emit("MOVQ", ptr, rec)
-    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
-}
\ No newline at end of file
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/compiler.go b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/compiler.go
similarity index 93%
rename from vendor/github.com/bytedance/sonic/internal/decoder/compiler.go
rename to vendor/github.com/bytedance/sonic/internal/decoder/jitdec/compiler.go
index 8f3905fca37e0337a16545652cf2a4dba23608b8..a097d171db851d7f5b9c2bb34125655fc7739bf8 100644
--- a/vendor/github.com/bytedance/sonic/internal/decoder/compiler.go
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/compiler.go
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package decoder
+package jitdec
 
 import (
     `encoding/json`
@@ -77,7 +77,6 @@ const (
     _OP_array_clear_p
     _OP_slice_init
     _OP_slice_append
-    _OP_object_skip
     _OP_object_next
     _OP_struct_field
     _OP_unmarshal
@@ -97,6 +96,7 @@ const (
     _OP_check_char_0
     _OP_dismatch_err
     _OP_go_skip
+    _OP_skip_emtpy
     _OP_add
     _OP_check_empty
     _OP_debug
@@ -155,7 +155,6 @@ var _OpNames = [256]string {
     _OP_array_skip   : "array_skip",
     _OP_slice_init   : "slice_init",
     _OP_slice_append : "slice_append",
-    _OP_object_skip  : "object_skip",
     _OP_object_next  : "object_next",
     _OP_struct_field : "struct_field",
     _OP_unmarshal    : "unmarshal",
@@ -271,6 +270,13 @@ func newInsVt(op _Op, vt reflect.Type) _Instr {
     }
 }
 
+func newInsVtI(op _Op, vt reflect.Type, iv int) _Instr {
+    return _Instr {
+        u: packOp(op) | rt.PackInt(iv),
+        p: unsafe.Pointer(rt.UnpackType(vt)),
+    }
+}
+
 func newInsVf(op _Op, vf *caching.FieldMap) _Instr {
     return _Instr {
         u: packOp(op),
@@ -452,6 +458,10 @@ func (self *_Program) rtt(op _Op, vt reflect.Type) {
     *self = append(*self, newInsVt(op, vt))
 }
 
+func (self *_Program) rtti(op _Op, vt reflect.Type, iv int) {
+    *self = append(*self, newInsVtI(op, vt, iv))
+}
+
 func (self *_Program) fmv(op _Op, vf *caching.FieldMap) {
     *self = append(*self, newInsVf(op, vf))
 }
@@ -527,40 +537,66 @@ func (self *_Compiler) compile(vt reflect.Type) (ret _Program, err error) {
     return
 }
 
-func (self *_Compiler) compileOne(p *_Program, sp int, vt reflect.Type) {
-    /* check for recursive nesting */
-    ok := self.tab[vt]
-    if ok {
-        p.rtt(_OP_recurse, vt)
-        return
-    }
+const (
+    checkMarshalerFlags_quoted = 1
+)
 
+func (self *_Compiler) checkMarshaler(p *_Program, vt reflect.Type, flags int, exec bool) bool {
     pt := reflect.PtrTo(vt)
 
     /* check for `json.Unmarshaler` with pointer receiver */
     if pt.Implements(jsonUnmarshalerType) {
-        p.rtt(_OP_unmarshal_p, pt)
-        return
+        if exec {
+            p.add(_OP_lspace)
+            p.rtti(_OP_unmarshal_p, pt, flags)
+        }
+        return true
     }
 
     /* check for `json.Unmarshaler` */
     if vt.Implements(jsonUnmarshalerType) {
-        p.add(_OP_lspace)
-        self.compileUnmarshalJson(p, vt)
-        return
+        if exec {
+            p.add(_OP_lspace)
+            self.compileUnmarshalJson(p, vt, flags)
+        }
+        return true
+    }
+
+    if flags == checkMarshalerFlags_quoted {
+        // text marshaler shouldn't be supported for quoted string
+        return false
     }
 
     /* check for `encoding.TextMarshaler` with pointer receiver */
     if pt.Implements(encodingTextUnmarshalerType) {
-        p.add(_OP_lspace)
-        self.compileUnmarshalTextPtr(p, pt)
-        return
+        if exec {
+            p.add(_OP_lspace)
+            self.compileUnmarshalTextPtr(p, pt, flags)
+        }
+        return true
     }
 
     /* check for `encoding.TextUnmarshaler` */
     if vt.Implements(encodingTextUnmarshalerType) {
-        p.add(_OP_lspace)
-        self.compileUnmarshalText(p, vt)
+        if exec {
+            p.add(_OP_lspace)
+            self.compileUnmarshalText(p, vt, flags)
+        }
+        return true
+    }
+
+    return false
+}
+
+func (self *_Compiler) compileOne(p *_Program, sp int, vt reflect.Type) {
+    /* check for recursive nesting */
+    ok := self.tab[vt]
+    if ok {
+        p.rtt(_OP_recurse, vt)
+        return
+    }
+
+    if self.checkMarshaler(p, vt, 0, true) {
         return
     }
@@ -683,17 +719,9 @@ func (self *_Compiler) compilePtr(p *_Program, sp int, et reflect.Type) {
 
     /* dereference all the way down */
     for et.Kind() == reflect.Ptr {
-        if et.Implements(jsonUnmarshalerType) {
-            p.rtt(_OP_unmarshal_p, et)
+        if self.checkMarshaler(p, et, 0, true) {
             return
         }
-
-        if et.Implements(encodingTextUnmarshalerType) {
-            p.add(_OP_lspace)
-            self.compileUnmarshalTextPtr(p, et)
-            return
-        }
-
         et = et.Elem()
         p.rtt(_OP_deref, et)
     }
@@ -706,7 +734,7 @@ func (self *_Compiler) compilePtr(p *_Program, sp int, et reflect.Type) {
     /* enter the recursion */
     p.add(_OP_lspace)
     self.tab[et] = true
-
+    
     /* not inline the pointer type
     * recursing the defined pointer type's elem will casue issue379.
     */
@@ -716,8 +744,12 @@ func (self *_Compiler) compilePtr(p *_Program, sp int, et reflect.Type) {
     j := p.pc()
     p.add(_OP_goto)
+
+    // set val pointer as nil
     p.pin(i)
     p.add(_OP_nil_1)
+
+    // nothing todo
     p.pin(j)
 }
@@ -869,7 +901,24 @@ func (self *_Compiler) compileStructBody(p *_Program, sp int, vt reflect.Type) {
     n := p.pc()
     p.add(_OP_is_null)
 
-    skip := self.checkIfSkip(p, vt, '{')
+    j := p.pc()
+    p.chr(_OP_check_char_0, '{')
+    p.rtt(_OP_dismatch_err, vt)
+
+    /* special case for empty object */
+    if len(fv) == 0 {
+        p.pin(j)
+        s := p.pc()
+        p.add(_OP_skip_emtpy)
+        p.pin(s)
+        p.pin(n)
+        return
+    }
+
+    skip := p.pc()
+    p.add(_OP_go_skip)
+    p.pin(j)
+    p.int(_OP_add, 1)
 
     p.add(_OP_save)
     p.add(_OP_lspace)
@@ -887,11 +936,6 @@ func (self *_Compiler) compileStructBody(p *_Program, sp int, vt reflect.Type) {
     p.chr(_OP_check_char, '}')
     p.chr(_OP_match_char, ',')
 
-    /* special case of an empty struct */
-    if len(fv) == 0 {
-        p.add(_OP_object_skip)
-        goto end_of_object
-    }
 
     /* match the remaining fields */
     p.add(_OP_lspace)
@@ -927,7 +971,6 @@ func (self *_Compiler) compileStructBody(p *_Program, sp int, vt reflect.Type) {
         p.int(_OP_goto, y0)
     }
 
-end_of_object:
     p.pin(x)
     p.pin(y1)
     p.add(_OP_drop)
@@ -935,7 +978,22 @@ end_of_object:
     p.pin(skip)
 }
 
+func (self *_Compiler) compileStructFieldStrUnmarshal(p *_Program, vt reflect.Type) {
+    p.add(_OP_lspace)
+    n0 := p.pc()
+    p.add(_OP_is_null)
+    self.checkMarshaler(p, vt, checkMarshalerFlags_quoted, true)
+    p.pin(n0)
+}
+
 func (self *_Compiler) compileStructFieldStr(p *_Program, sp int, vt reflect.Type) {
+    // according to std, json.Unmarshaler should be called before stringize
+    // see https://github.com/bytedance/sonic/issues/670
+    if self.checkMarshaler(p, vt, checkMarshalerFlags_quoted, false) {
+        self.compileStructFieldStrUnmarshal(p, vt)
+        return
+    }
+
     n1 := -1
     ft := vt
     sv := false
@@ -1103,7 +1161,7 @@ func (self *_Compiler) compileUnmarshalEnd(p *_Program, vt reflect.Type, i int)
     p.pin(j)
 }
 
-func (self *_Compiler) compileUnmarshalJson(p *_Program, vt reflect.Type) {
+func (self *_Compiler) compileUnmarshalJson(p *_Program, vt reflect.Type, flags int) {
     i := p.pc()
     v := _OP_unmarshal
     p.add(_OP_is_null)
@@ -1114,11 +1172,11 @@ func (self *_Compiler) compileUnmarshalJson(p *_Program, vt reflect.Type) {
     }
 
     /* call the unmarshaler */
-    p.rtt(v, vt)
+    p.rtti(v, vt, flags)
     self.compileUnmarshalEnd(p, vt, i)
 }
 
-func (self *_Compiler) compileUnmarshalText(p *_Program, vt reflect.Type) {
+func (self *_Compiler) compileUnmarshalText(p *_Program, vt reflect.Type, iv int) {
     i := p.pc()
     v := _OP_unmarshal_text
     p.add(_OP_is_null)
@@ -1131,15 +1189,15 @@ func (self *_Compiler) compileUnmarshalText(p *_Program, vt reflect.Type) {
     }
 
     /* call the unmarshaler */
-    p.rtt(v, vt)
+    p.rtti(v, vt, iv)
     self.compileUnmarshalEnd(p, vt, i)
 }
 
-func (self *_Compiler) compileUnmarshalTextPtr(p *_Program, vt reflect.Type) {
+func (self *_Compiler) compileUnmarshalTextPtr(p *_Program, vt reflect.Type, iv int) {
     i := p.pc()
     p.add(_OP_is_null)
     p.chr(_OP_match_char, '"')
-    p.rtt(_OP_unmarshal_text_p, vt)
+    p.rtti(_OP_unmarshal_text_p, vt, iv)
     p.pin(i)
 }
 
@@ -1152,4 +1210,4 @@ func (self *_Compiler) checkIfSkip(p *_Program, vt reflect.Type, c byte) int {
     p.pin(j)
     p.int(_OP_add, 1)
     return s
-}
\ No newline at end of file
+}
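The compileStructFieldStr change above encodes the rule cited from issue 670: a field tagged `json:",string"` whose type implements json.Unmarshaler must route through the unmarshaler (the quoted variant), not through the generic stringized-value path. An illustrative field shape (ID and Row are hypothetical types, not from sonic):

    package main

    import (
        "fmt"
        "strconv"
    )

    // Under the quoted path, the unmarshaler receives the payload with the
    // outer quotes already stripped (see decodeJsonUnmarshalerQuoted below).
    type ID int64

    func (i *ID) UnmarshalJSON(b []byte) error {
        n, err := strconv.ParseInt(string(b), 10, 64)
        *i = ID(n)
        return err
    }

    type Row struct {
        V ID `json:"v,string"` // {"v":"42"} routes through (*ID).UnmarshalJSON
    }

    func main() {
        var id ID
        _ = id.UnmarshalJSON([]byte("42")) // what the quoted path hands over
        fmt.Println(id)                    // 42
    }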
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/debug.go b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/debug.go
similarity index 99%
rename from vendor/github.com/bytedance/sonic/internal/decoder/debug.go
rename to vendor/github.com/bytedance/sonic/internal/decoder/jitdec/debug.go
index 9cf3a6a00219da7e43464a6aa669dd00df6c8418..b59a3e571a8ce6f0d3f399e51c072b00657def84 100644
--- a/vendor/github.com/bytedance/sonic/internal/decoder/debug.go
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/debug.go
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package decoder
+package jitdec
 
 import (
     `os`
@@ -67,4 +67,4 @@ func (self *_Assembler) debug_instr(i int, v *_Instr) {
         }
         self.force_gc()
     }
-}
\ No newline at end of file
+}
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/decoder.go b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/decoder.go
new file mode 100644
index 0000000000000000000000000000000000000000..bbb4b4b61c045cddf6e23985ebc28ddc18f54f07
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/decoder.go
@@ -0,0 +1,140 @@
+package jitdec
+
+import (
+    `unsafe`
+    `encoding/json`
+    `reflect`
+    `runtime`
+
+    `github.com/bytedance/sonic/internal/decoder/consts`
+    `github.com/bytedance/sonic/internal/decoder/errors`
+    `github.com/bytedance/sonic/internal/rt`
+    `github.com/bytedance/sonic/utf8`
+    `github.com/bytedance/sonic/option`
+)
+
+type (
+    MismatchTypeError = errors.MismatchTypeError
+    SyntaxError = errors.SyntaxError
+)
+
+const (
+    _F_allow_control    = consts.F_allow_control
+    _F_copy_string      = consts.F_copy_string
+    _F_disable_unknown  = consts.F_disable_unknown
+    _F_disable_urc      = consts.F_disable_urc
+    _F_use_int64        = consts.F_use_int64
+    _F_use_number       = consts.F_use_number
+    _F_no_validate_json = consts.F_no_validate_json
+    _F_validate_string  = consts.F_validate_string
+)
+
+var (
+    error_wrap     = errors.ErrorWrap
+    error_type     = errors.ErrorType
+    error_field    = errors.ErrorField
+    error_value    = errors.ErrorValue
+    error_mismatch = errors.ErrorMismatch
+    stackOverflow  = errors.StackOverflow
+)
+
+
+// Decode parses the JSON-encoded data from current position and stores the result
+// in the value pointed to by val.
+func Decode(s *string, i *int, f uint64, val interface{}) error {
+    /* validate json if needed */
+    if (f & (1 << _F_validate_string)) != 0 && !utf8.ValidateString(*s){
+        dbuf := utf8.CorrectWith(nil, rt.Str2Mem(*s), "\ufffd")
+        *s = rt.Mem2Str(dbuf)
+    }
+
+    vv := rt.UnpackEface(val)
+    vp := vv.Value
+
+    /* check for nil type */
+    if vv.Type == nil {
+        return &json.InvalidUnmarshalError{}
+    }
+
+    /* must be a non-nil pointer */
+    if vp == nil || vv.Type.Kind() != reflect.Ptr {
+        return &json.InvalidUnmarshalError{Type: vv.Type.Pack()}
+    }
+
+    etp := rt.PtrElem(vv.Type)
+
+    /* check the defined pointer type for issue 379 */
+    if vv.Type.IsNamed() {
+        newp := vp
+        etp = vv.Type
+        vp = unsafe.Pointer(&newp)
+    }
+
+    /* create a new stack, and call the decoder */
+    sb := newStack()
+    nb, err := decodeTypedPointer(*s, *i, etp, vp, sb, f)
+    /* return the stack back */
+    *i = nb
+    freeStack(sb)
+
+    /* avoid GC ahead */
+    runtime.KeepAlive(vv)
+    return err
+}
+
+
+// Pretouch compiles vt ahead-of-time to avoid JIT compilation on-the-fly, in
+// order to reduce the first-hit latency.
+//
+// Opts are the compile options, for example, "option.WithCompileRecursiveDepth" is
+// a compile option to set the depth of recursive compile for the nested struct type.
+func Pretouch(vt reflect.Type, opts ...option.CompileOption) error {
+    cfg := option.DefaultCompileOptions()
+    for _, opt := range opts {
+        opt(&cfg)
+    }
+    return pretouchRec(map[reflect.Type]bool{vt:true}, cfg)
+}
+
+func pretouchType(_vt reflect.Type, opts option.CompileOptions) (map[reflect.Type]bool, error) {
+    /* compile function */
+    compiler := newCompiler().apply(opts)
+    decoder := func(vt *rt.GoType, _ ...interface{}) (interface{}, error) {
+        if pp, err := compiler.compile(_vt); err != nil {
+            return nil, err
+        } else {
+            as := newAssembler(pp)
+            as.name = _vt.String()
+            return as.Load(), nil
+        }
+    }
+
+    /* find or compile */
+    vt := rt.UnpackType(_vt)
+    if val := programCache.Get(vt); val != nil {
+        return nil, nil
+    } else if _, err := programCache.Compute(vt, decoder); err == nil {
+        return compiler.rec, nil
+    } else {
+        return nil, err
+    }
+}
+
+func pretouchRec(vtm map[reflect.Type]bool, opts option.CompileOptions) error {
+    if opts.RecursiveDepth < 0 || len(vtm) == 0 {
+        return nil
+    }
+    next := make(map[reflect.Type]bool)
+    for vt := range(vtm) {
+        sub, err := pretouchType(vt, opts)
+        if err != nil {
+            return err
+        }
+        for svt := range(sub) {
+            next[svt] = true
+        }
+    }
+    opts.RecursiveDepth -= 1
+    return pretouchRec(next, opts)
+}
+
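For orientation, a sketch of how these new jitdec entry points are driven. jitdec is an internal package, so the real caller is sonic's own decoder facade; the wiring below is illustrative only (passing 0 for the flag word, whose bits come from the consts package shown above):

    package decoder

    import (
        "reflect"

        "github.com/bytedance/sonic/internal/decoder/jitdec"
        "github.com/bytedance/sonic/option"
    )

    // example: Decode consumes JSON starting at position i and reports the
    // new position back through i; Pretouch compiles a type ahead of time
    // to avoid the first-hit JIT cost.
    func example() error {
        var v map[string]interface{}
        s := `{"a":1}`
        i := 0
        if err := jitdec.Decode(&s, &i, 0, &v); err != nil { // 0 = no option flags
            return err
        }
        return jitdec.Pretouch(reflect.TypeOf(v), option.WithCompileRecursiveDepth(2))
    }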
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/generic_amd64_go117.go b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/generic_regabi_amd64.go
similarity index 95%
rename from vendor/github.com/bytedance/sonic/internal/decoder/generic_amd64_go117.go
rename to vendor/github.com/bytedance/sonic/internal/decoder/jitdec/generic_regabi_amd64.go
index df1cd9f5bb71d3022bf91e853a6badb6e8041eaf..e6d5e3e84229ef942bfaa812bbb23b53ea107de4 100644
--- a/vendor/github.com/bytedance/sonic/internal/decoder/generic_amd64_go117.go
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/generic_regabi_amd64.go
@@ -1,5 +1,4 @@
-//go:build go1.17 && !go1.21
-// +build go1.17,!go1.21
+// +build go1.17,!go1.24
 
 /*
  * Copyright 2021 ByteDance Inc.
@@ -17,19 +16,17 @@
  * limitations under the License.
  */
 
-package decoder
+package jitdec
 
 import (
     `encoding/json`
     `fmt`
     `reflect`
-    `strconv`
 
     `github.com/bytedance/sonic/internal/jit`
     `github.com/bytedance/sonic/internal/native`
     `github.com/bytedance/sonic/internal/native/types`
     `github.com/twitchyliquid64/golang-asm/obj`
-    `github.com/twitchyliquid64/golang-asm/obj/x86`
 )
 
 /** Crucial Registers:
@@ -122,9 +119,9 @@ func (self *_ValueDecoder) call_go(fn obj.Addr) {
 }
 
 func (self *_ValueDecoder) callc(fn obj.Addr) {
-    self.Emit("XCHGQ", _IP, _BP)
+    self.save(_IP)
     self.call(fn)
-    self.Emit("XCHGQ", _IP, _BP)
+    self.load(_IP)
 }
 
 func (self *_ValueDecoder) call_c(fn obj.Addr) {
@@ -286,7 +283,7 @@ func (self *_ValueDecoder) compile() {
     self.Emit("LEAQ", _VAR_ss, _CX)                     // LEAQ ss, CX
     self.Emit("MOVQ", _VAR_df, _R8)                     // MOVQ $df, R8
     self.Emit("BTSQ", jit.Imm(_F_allow_control), _R8)   // ANDQ $1<<_F_allow_control, R8
-    self.callc(_F_value)                                // CALL value
+    self.callc(_F_value)                               // CALL value
     self.Emit("MOVQ", _AX, _IC)                         // MOVQ AX, IC
 
     /* check for errors */
@@ -720,46 +717,6 @@ func (self *_ValueDecoder) compile() {
     }
 }
 
-func (self *_ValueDecoder) WritePtrAX(i int, rec obj.Addr, saveDI bool) {
-    self.Emit("MOVQ", _V_writeBarrier, _R9)
-    self.Emit("CMPL", jit.Ptr(_R9, 0), jit.Imm(0))
-    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
-    if saveDI {
-        self.save(_DI)
-    }
-    self.Emit("LEAQ", rec, _DI)
-    self.call(_F_gcWriteBarrierAX)
-    if saveDI {
-        self.load(_DI)
-    }
-    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
-    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
-    self.Emit("MOVQ", _AX, rec)
-    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
-}
-
-func (self *_ValueDecoder) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr, saveDI bool) {
-    if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX {
-        panic("rec contains AX!")
-    }
-    self.Emit("MOVQ", _V_writeBarrier, _AX)
-    self.Emit("CMPL", jit.Ptr(_AX, 0), jit.Imm(0))
-    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
-    self.Emit("MOVQ", ptr, _AX)
-    if saveDI {
-        self.save(_DI)
-    }
-    self.Emit("LEAQ", rec, _DI)
-    self.call(_F_gcWriteBarrierAX)
-    if saveDI {
-        self.load(_DI)
-    }
-    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
-    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
-    self.Emit("MOVQ", ptr, rec)
-    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
-}
-
 /** Generic Decoder **/
 
 var (
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/generic_amd64_go117_test.s b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/generic_regabi_amd64_test.s
similarity index 97%
rename from vendor/github.com/bytedance/sonic/internal/decoder/generic_amd64_go117_test.s
rename to vendor/github.com/bytedance/sonic/internal/decoder/jitdec/generic_regabi_amd64_test.s
index 6c2686de9ae3320bc7e93fad8d3c6d2abdb8c8c9..19ed3752f8e86c054f23bfbffd1b294191e2680e 100644
--- a/vendor/github.com/bytedance/sonic/internal/decoder/generic_amd64_go117_test.s
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/generic_regabi_amd64_test.s
@@ -1,4 +1,4 @@
-// +build go1.17,!go1.21
+// +build go1.17,!go1.24
 //
 // Copyright 2021 ByteDance Inc.
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/pools.go b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/pools.go
similarity index 95%
rename from vendor/github.com/bytedance/sonic/internal/decoder/pools.go
rename to vendor/github.com/bytedance/sonic/internal/decoder/jitdec/pools.go
index 06adc7fa1461bb1f91d3750e6ec5a4c7e84c769a..01868cb2fa1ea6e9ba680270193e12b601e9985b 100644
--- a/vendor/github.com/bytedance/sonic/internal/decoder/pools.go
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/pools.go
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package decoder
+package jitdec
 
 import (
     `sync`
@@ -29,13 +29,14 @@ const (
     _MinSlice      = 2
     _MaxStack      = 4096 // 4k slots
     _MaxStackBytes = _MaxStack * _PtrBytes
-    _MaxDigitNums  = 800    // used in atof fallback algorithm
+    _MaxDigitNums  = types.MaxDigitNums    // used in atof fallback algorithm
 )
 
 const (
     _PtrBytes   = _PTR_SIZE / 8
     _FsmOffset  = (_MaxStack + 1) * _PtrBytes
     _DbufOffset = _FsmOffset + int64(unsafe.Sizeof(types.StateMachine{})) + types.MAX_RECURSE * _PtrBytes
+    _EpOffset   = _DbufOffset + _MaxDigitNums
     _StackSize  = unsafe.Sizeof(_Stack{})
 )
 
@@ -53,6 +54,7 @@ type _Stack struct {
     mm types.StateMachine
     vp [types.MAX_RECURSE]unsafe.Pointer
     dp [_MaxDigitNums]byte
+    ep unsafe.Pointer
 }
 
 type _Decoder func(
@@ -140,4 +142,4 @@ func findOrCompile(vt *rt.GoType) (_Decoder, error) {
     } else {
         return nil, err
     }
-}
\ No newline at end of file
+}
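The new ep slot sits right after the digit buffer, which is exactly how _EpOffset is derived; keeping it as an unsafe.Pointer field (written through the write barrier in decode_dynamic) keeps the deferred mismatch-error pointer visible to the GC. A toy layout check under simplified stand-in types, not the real _Stack:

    package main

    import (
        "fmt"
        "unsafe"
    )

    // Stand-in: the offset of the trailing pointer field equals the size of
    // the byte buffer before it, mirroring _EpOffset = _DbufOffset + _MaxDigitNums.
    type stack struct {
        dp [800]byte // 800 is the illustrative buffer size from the old constant
        ep unsafe.Pointer
    }

    func main() {
        fmt.Println(unsafe.Offsetof(stack{}.ep) == unsafe.Sizeof([800]byte{})) // true
    }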
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/primitives.go b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/primitives.go
similarity index 84%
rename from vendor/github.com/bytedance/sonic/internal/decoder/primitives.go
rename to vendor/github.com/bytedance/sonic/internal/decoder/jitdec/primitives.go
index d6053e2cb4d58f1710f069eb6dfbc3c031edfaf9..5adfc038aca0d9e13ed1991dec755383415dc2d3 100644
--- a/vendor/github.com/bytedance/sonic/internal/decoder/primitives.go
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/primitives.go
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package decoder
+package jitdec
 
 import (
     `encoding`
@@ -30,9 +30,7 @@ func decodeTypedPointer(s string, i int, vt *rt.GoType, vp unsafe.Pointer, sb *_
         return 0, err
     } else {
         rt.MoreStack(_FP_size + _VD_size + native.MaxFrameSize)
-        rt.StopProf()
         ret, err := fn(s, i, vp, sb, fv, "", nil)
-        rt.StartProf()
         return ret, err
     }
 }
@@ -41,6 +39,13 @@ func decodeJsonUnmarshaler(vv interface{}, s string) error {
     return vv.(json.Unmarshaler).UnmarshalJSON(rt.Str2Mem(s))
 }
 
+func decodeJsonUnmarshalerQuoted(vv interface{}, s string) error {
+    if len(s) < 2 || s[0] != '"' || s[len(s)-1] != '"' {
+        return &MismatchTypeError{}
+    }
+    return vv.(json.Unmarshaler).UnmarshalJSON(rt.Str2Mem(s[1:len(s)-1]))
+}
+
 func decodeTextUnmarshaler(vv interface{}, s string) error {
     return vv.(encoding.TextUnmarshaler).UnmarshalText(rt.Str2Mem(s))
 }
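decodeJsonUnmarshalerQuoted above is the runtime half of the quoted path: the payload must be a JSON string, and the outer quotes are stripped before the wrapped UnmarshalJSON call; anything else becomes a MismatchTypeError, which the assembler records and then resumes from. A minimal standalone restatement of that check (unquotePayload is illustrative, not sonic API):

    package main

    import "fmt"

    // unquotePayload mirrors the guard in decodeJsonUnmarshalerQuoted:
    // quoted input yields the inner bytes; unquoted input is a mismatch.
    func unquotePayload(s string) (string, bool) {
        if len(s) < 2 || s[0] != '"' || s[len(s)-1] != '"' {
            return "", false // the decoder returns &MismatchTypeError{}
        }
        return s[1 : len(s)-1], true
    }

    func main() {
        fmt.Println(unquotePayload(`"42"`)) // 42 true
        fmt.Println(unquotePayload(`42`))   //    false
    }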
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/stubs_go120.go b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/stubs_go116.go
similarity index 91%
rename from vendor/github.com/bytedance/sonic/internal/decoder/stubs_go120.go
rename to vendor/github.com/bytedance/sonic/internal/decoder/jitdec/stubs_go116.go
index cde6a19728cab5177acadb82be8d1a2984f7b59f..8fa7c32fcd4255e2c68f76fe45fbf02614180edd 100644
--- a/vendor/github.com/bytedance/sonic/internal/decoder/stubs_go120.go
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/stubs_go116.go
@@ -1,4 +1,4 @@
-// +build go1.20
+// +build go1.16,!go1.20
 
 /*
  * Copyright 2021 ByteDance Inc.
@@ -16,18 +16,18 @@
  * limitations under the License.
  */
 
-package decoder
+package jitdec
 
 import (
     `unsafe`
     `reflect`
 
-    _ `github.com/chenzhuoyu/base64x`
+    _ `github.com/cloudwego/base64x`
 
     `github.com/bytedance/sonic/internal/rt`
 )
 
-//go:linkname _subr__b64decode github.com/chenzhuoyu/base64x._subr__b64decode
+//go:linkname _subr__b64decode github.com/cloudwego/base64x._subr__b64decode
 var _subr__b64decode uintptr
 
 // runtime.maxElementSize
@@ -72,11 +72,6 @@ func mallocgc(size uintptr, typ *rt.GoType, needzero bool) unsafe.Pointer
 //goland:noinspection GoUnusedParameter
 func makeslice(et *rt.GoType, len int, cap int) unsafe.Pointer
 
-//go:noescape
-//go:linkname growslice reflect.growslice
-//goland:noinspection GoUnusedParameter
-func growslice(et *rt.GoType, old rt.GoSlice, cap int) rt.GoSlice
-
 //go:linkname makemap_small runtime.makemap_small
 func makemap_small() unsafe.Pointer
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/stubs_go115.go b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/stubs_go120.go
similarity index 78%
rename from vendor/github.com/bytedance/sonic/internal/decoder/stubs_go115.go
rename to vendor/github.com/bytedance/sonic/internal/decoder/jitdec/stubs_go120.go
index 1a0917c3c3dd15668d8d6c332311d9a2c4a413ae..a6dad26d75eddcad37c931279cec72c08658ae68 100644
--- a/vendor/github.com/bytedance/sonic/internal/decoder/stubs_go115.go
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/stubs_go120.go
@@ -1,4 +1,4 @@
-// +build go1.15,!go1.20
+// +build go1.20
 
 /*
  * Copyright 2021 ByteDance Inc.
@@ -16,18 +16,18 @@
  * limitations under the License.
  */
 
-package decoder
+package jitdec
 
 import (
     `unsafe`
     `reflect`
 
-    _ `github.com/chenzhuoyu/base64x`
+    _ `github.com/cloudwego/base64x`
 
     `github.com/bytedance/sonic/internal/rt`
 )
 
-//go:linkname _subr__b64decode github.com/chenzhuoyu/base64x._subr__b64decode
+//go:linkname _subr__b64decode github.com/cloudwego/base64x._subr__b64decode
 var _subr__b64decode uintptr
 
 // runtime.maxElementSize
@@ -72,33 +72,28 @@ func mallocgc(size uintptr, typ *rt.GoType, needzero bool) unsafe.Pointer
 //goland:noinspection GoUnusedParameter
 func makeslice(et *rt.GoType, len int, cap int) unsafe.Pointer
 
-//go:noescape
-//go:linkname growslice runtime.growslice
-//goland:noinspection GoUnusedParameter
-func growslice(et *rt.GoType, old rt.GoSlice, cap int) rt.GoSlice
-
 //go:linkname makemap_small runtime.makemap_small
 func makemap_small() unsafe.Pointer
 
 //go:linkname mapassign runtime.mapassign
 //goland:noinspection GoUnusedParameter
-func mapassign(t *rt.GoType, h unsafe.Pointer, k unsafe.Pointer) unsafe.Pointer
+func mapassign(t *rt.GoMapType, h unsafe.Pointer, k unsafe.Pointer) unsafe.Pointer
 
 //go:linkname mapassign_fast32 runtime.mapassign_fast32
 //goland:noinspection GoUnusedParameter
-func mapassign_fast32(t *rt.GoType, h unsafe.Pointer, k uint32) unsafe.Pointer
+func mapassign_fast32(t *rt.GoMapType, h unsafe.Pointer, k uint32) unsafe.Pointer
 
 //go:linkname mapassign_fast64 runtime.mapassign_fast64
 //goland:noinspection GoUnusedParameter
-func mapassign_fast64(t *rt.GoType, h unsafe.Pointer, k uint64) unsafe.Pointer
+func mapassign_fast64(t *rt.GoMapType, h unsafe.Pointer, k uint64) unsafe.Pointer
 
 //go:linkname mapassign_fast64ptr runtime.mapassign_fast64ptr
 //goland:noinspection GoUnusedParameter
-func mapassign_fast64ptr(t *rt.GoType, h unsafe.Pointer, k unsafe.Pointer) unsafe.Pointer
+func mapassign_fast64ptr(t *rt.GoMapType, h unsafe.Pointer, k unsafe.Pointer) unsafe.Pointer
 
 //go:linkname mapassign_faststr runtime.mapassign_faststr
 //goland:noinspection GoUnusedParameter
-func mapassign_faststr(t *rt.GoType, h unsafe.Pointer, s string) unsafe.Pointer
+func mapassign_faststr(t *rt.GoMapType, h unsafe.Pointer, s string) unsafe.Pointer
 
 //go:nosplit
 //go:linkname memclrHasPointers runtime.memclrHasPointers
@@ -108,4 +103,4 @@ func memclrHasPointers(ptr unsafe.Pointer, n uintptr)
 //go:noescape
 //go:linkname memclrNoHeapPointers runtime.memclrNoHeapPointers
 //goland:noinspection GoUnusedParameter
-func memclrNoHeapPointers(ptr unsafe.Pointer, n uintptr)
\ No newline at end of file
+func memclrNoHeapPointers(ptr unsafe.Pointer, n uintptr)
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/types.go b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/types.go
similarity index 99%
rename from vendor/github.com/bytedance/sonic/internal/decoder/types.go
rename to vendor/github.com/bytedance/sonic/internal/decoder/jitdec/types.go
index 6fc0e706cef32c9b64d58d20ed1aad95e7c00036..c196eb5b780a34c3dd2c97caddc8839f453c37e7 100644
--- a/vendor/github.com/bytedance/sonic/internal/decoder/types.go
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/types.go
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package decoder
+package jitdec
 
 import (
     `encoding`
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/utils.go b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/utils.go
similarity index 98%
rename from vendor/github.com/bytedance/sonic/internal/decoder/utils.go
rename to vendor/github.com/bytedance/sonic/internal/decoder/jitdec/utils.go
index 23ee5d5018ce00cba3daa30ea4b98a066c26b3f1..0a7a202892ba53abdda955ab8932c80cec5f0b93 100644
--- a/vendor/github.com/bytedance/sonic/internal/decoder/utils.go
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/utils.go
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package decoder
+package jitdec
 
 import (
     `unsafe`
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/optdec/compile_struct.go b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/compile_struct.go
new file mode 100644
index 0000000000000000000000000000000000000000..713fb65610e18a6b0e9216bbb5861c6a82a8402d
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/compile_struct.go
@@ -0,0 +1,174 @@
+package optdec
+
+import (
+    "fmt"
+    "reflect"
+
+    caching "github.com/bytedance/sonic/internal/optcaching"
+    "github.com/bytedance/sonic/internal/rt"
+    "github.com/bytedance/sonic/internal/resolver"
+)
+
+const (
+    _MAX_FIELDS = 50 // cutoff at 50 fields struct
+)
+
+func (c *compiler) compileIntStringOption(vt reflect.Type) decFunc {
+    switch vt.Size() {
+    case 4:
+        switch vt.Kind() {
+        case reflect.Uint:
+            fallthrough
+        case reflect.Uintptr:
+            return &u32StringDecoder{}
+        case reflect.Int:
+            return &i32StringDecoder{}
+        }
+    case 8:
+        switch vt.Kind() {
+        case reflect.Uint:
+            fallthrough
+        case reflect.Uintptr:
+            return &u64StringDecoder{}
+        case reflect.Int:
+            return &i64StringDecoder{}
+        }
+    default:
+        panic("not supported pointer size: " + fmt.Sprint(vt.Size()))
+    }
+    panic("unreachable")
+}
+
+func isInteger(vt reflect.Type) bool {
+    switch vt.Kind() {
+    case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint, reflect.Uintptr, reflect.Int: return true
+    default: return false
+    }
+}
+
+func (c *compiler) assertStringOptTypes(vt reflect.Type) {
+    if c.depth > _CompileMaxDepth {
+        panic(*stackOverflow)
+    }
+
+    c.depth += 1
+    defer func () {
+        c.depth -= 1
+    }()
+
+    if isInteger(vt) {
+        return
+    }
+
+    switch vt.Kind() {
+    case reflect.String, reflect.Bool, reflect.Float32, reflect.Float64:
+        return
+    case reflect.Ptr: c.assertStringOptTypes(vt.Elem())
+    default:
+        panicForInvalidStrType(vt)
+    }
+}
+
+func (c *compiler) compileFieldStringOption(vt reflect.Type) decFunc {
+    c.assertStringOptTypes(vt)
+    unmDec := c.tryCompilePtrUnmarshaler(vt, true)
+    if unmDec != nil {
+        return unmDec
+    }
+
+    switch vt.Kind() {
+    case reflect.String:
+        if vt == jsonNumberType {
+            return &numberStringDecoder{}
+        }
+        return &strStringDecoder{}
+    case reflect.Bool:
+        return &boolStringDecoder{}
+    case reflect.Int8:
+        return &i8StringDecoder{}
+    case reflect.Int16:
+        return &i16StringDecoder{}
+    case reflect.Int32:
+        return &i32StringDecoder{}
+    case reflect.Int64:
+        return &i64StringDecoder{}
+    case reflect.Uint8:
+        return &u8StringDecoder{}
+    case reflect.Uint16:
+        return &u16StringDecoder{}
+    case reflect.Uint32:
+        return &u32StringDecoder{}
+    case reflect.Uint64:
+        return &u64StringDecoder{}
+    case reflect.Float32:
+        return &f32StringDecoder{}
+    case reflect.Float64:
+        return &f64StringDecoder{}
+    case reflect.Uint:
+        fallthrough
+    case reflect.Uintptr:
+        fallthrough
+    case reflect.Int:
+        return c.compileIntStringOption(vt)
+    case reflect.Ptr:
+        return &ptrStrDecoder{
+            typ:   rt.UnpackType(vt.Elem()),
+            deref: c.compileFieldStringOption(vt.Elem()),
+        }
+    default:
+        panicForInvalidStrType(vt)
+        return nil
+    }
+}
+
+func (c *compiler) compileStruct(vt reflect.Type) decFunc {
+    c.enter(vt)
+    defer c.exit(vt)
+    if c.namedPtr {
+        c.namedPtr = false
+        return c.compileStructBody(vt)
+    }
+
+    if c.depth >= c.opts.MaxInlineDepth + 1 || (c.counts > 0 && vt.NumField() >= _MAX_FIELDS) {
+        return &recuriveDecoder{
+            typ: rt.UnpackType(vt),
+        }
+    } else {
+        return c.compileStructBody(vt)
+    }
+}
+
+func (c *compiler) compileStructBody(vt reflect.Type) decFunc {
+    fv := resolver.ResolveStruct(vt)
+    entries := make([]fieldEntry, 0, len(fv))
+
+    for _, f := range fv {
+        var dec decFunc
+        /* dealt with field tag options */
+        if f.Opts&resolver.F_stringize != 0 {
+            dec = c.compileFieldStringOption(f.Type)
+        } else {
+            dec = c.compile(f.Type)
+        }
+
+        /* deal with embedded pointer fields */
+        if f.Path[0].Kind == resolver.F_deref {
+            dec = &embeddedFieldPtrDecoder{
+                field:     f,
+                fieldDec:  dec,
+                fieldName: f.Name,
+            }
+        }
+
+        entries = append(entries, fieldEntry{
+            FieldMeta: f,
+            fieldDec:  dec,
+        })
+    }
+    return &structDecoder{
+        fieldMap:   caching.NewFieldLookup(fv),
+        fields:     entries,
+        structName: vt.Name(),
+        typ:        vt,
+    }
+}
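As assertStringOptTypes above shows, the optimized decoder accepts the ",string" option only for booleans, numbers, strings, and (chains of) pointers to those, and panics for anything else. A standalone restatement of that acceptance rule, as a sketch rather than the actual sonic function:

    package main

    import (
        "fmt"
        "reflect"
    )

    // validStringOpt mirrors the kind check behind assertStringOptTypes:
    // dereference pointers, then require a bool, string, or numeric kind.
    func validStringOpt(vt reflect.Type) bool {
        for vt.Kind() == reflect.Ptr {
            vt = vt.Elem()
        }
        switch vt.Kind() {
        case reflect.Bool, reflect.String,
            reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,
            reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64,
            reflect.Uintptr, reflect.Float32, reflect.Float64:
            return true
        default:
            return false
        }
    }

    func main() {
        fmt.Println(validStringOpt(reflect.TypeOf(0)))         // true
        fmt.Println(validStringOpt(reflect.TypeOf(new(bool)))) // true
        fmt.Println(validStringOpt(reflect.TypeOf([]int{})))   // false
    }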
fmt.Sprint(vt.Size()))
+    }
+    panic("unreachable")
+}
+
+func (c *compiler) rescue(ep *error) {
+    if val := recover(); val != nil {
+        if err, ok := val.(error); ok {
+            *ep = err
+        } else {
+            panic(val)
+        }
+    }
+}
+
+func (c *compiler) compileType(vt reflect.Type) (rt decFunc, err error) {
+    defer c.rescue(&err)
+    rt = c.compile(vt)
+    return rt, err
+}
+
+func (c *compiler) compile(vt reflect.Type) decFunc {
+    if c.visited[vt] {
+        return &recuriveDecoder{
+            typ: rt.UnpackType(vt),
+        }
+    }
+
+    dec := c.tryCompilePtrUnmarshaler(vt, false)
+    if dec != nil {
+        return dec
+    }
+
+    return c.compileBasic(vt)
+}
+
+func (c *compiler) compileBasic(vt reflect.Type) decFunc {
+    defer func() {
+        c.counts += 1
+    }()
+    switch vt.Kind() {
+    case reflect.Bool:
+        return &boolDecoder{}
+    case reflect.Int8:
+        return &i8Decoder{}
+    case reflect.Int16:
+        return &i16Decoder{}
+    case reflect.Int32:
+        return &i32Decoder{}
+    case reflect.Int64:
+        return &i64Decoder{}
+    case reflect.Uint8:
+        return &u8Decoder{}
+    case reflect.Uint16:
+        return &u16Decoder{}
+    case reflect.Uint32:
+        return &u32Decoder{}
+    case reflect.Uint64:
+        return &u64Decoder{}
+    case reflect.Float32:
+        return &f32Decoder{}
+    case reflect.Float64:
+        return &f64Decoder{}
+    case reflect.Uint:
+        fallthrough
+    case reflect.Uintptr:
+        fallthrough
+    case reflect.Int:
+        return c.compileInt(vt)
+    case reflect.String:
+        return c.compileString(vt)
+    case reflect.Array:
+        return c.compileArray(vt)
+    case reflect.Interface:
+        return c.compileInterface(vt)
+    case reflect.Map:
+        return c.compileMap(vt)
+    case reflect.Ptr:
+        return c.compilePtr(vt)
+    case reflect.Slice:
+        return c.compileSlice(vt)
+    case reflect.Struct:
+        return c.compileStruct(vt)
+    default:
+        panic(&json.UnmarshalTypeError{Type: vt})
+    }
+}
+
+func (c *compiler) compilePtr(vt reflect.Type) decFunc {
+    c.enter(vt)
+    defer c.exit(vt)
+
+    // special logic for a named pointer type, see issue 379
+    if reflect.PtrTo(vt.Elem()) != vt {
+        c.namedPtr = true
+        return &ptrDecoder{
+            typ:   rt.UnpackType(vt.Elem()),
+            deref: c.compileBasic(vt.Elem()),
+        }
+    }
+
+    return &ptrDecoder{
+        typ:   rt.UnpackType(vt.Elem()),
+        deref: c.compile(vt.Elem()),
+    }
+}
+
+func (c *compiler) compileArray(vt reflect.Type) decFunc {
+    c.enter(vt)
+    defer c.exit(vt)
+    return &arrayDecoder{
+        len:      vt.Len(),
+        elemType: rt.UnpackType(vt.Elem()),
+        elemDec:  c.compile(vt.Elem()),
+        typ:      vt,
+    }
+}
+
+func (c *compiler) compileString(vt reflect.Type) decFunc {
+    if vt == jsonNumberType {
+        return &numberDecoder{}
+    }
+    return &stringDecoder{}
+}
+
+func (c *compiler) tryCompileSliceUnmarshaler(vt reflect.Type) decFunc {
+    pt := reflect.PtrTo(vt.Elem())
+    if pt.Implements(jsonUnmarshalerType) {
+        return &sliceDecoder{
+            elemType: rt.UnpackType(vt.Elem()),
+            elemDec:  c.compile(vt.Elem()),
+            typ:      vt,
+        }
+    }
+
+    if pt.Implements(encodingTextUnmarshalerType) {
+        return &sliceDecoder{
+            elemType: rt.UnpackType(vt.Elem()),
+            elemDec:  c.compile(vt.Elem()),
+            typ:      vt,
+        }
+    }
+    return nil
+}
+
+func (c *compiler) compileSlice(vt reflect.Type) decFunc {
+    c.enter(vt)
+    defer c.exit(vt)
+
+    // use a dedicated decoder for some common slice types, to avoid function calls
+    et := rt.UnpackType(vt.Elem())
+
+    /* check `[]byte` first */
+    if et.Kind() == reflect.Uint8 /* []byte */ {
+        return c.compileSliceBytes(vt)
+    }
+
+    dec := c.tryCompileSliceUnmarshaler(vt)
+    if dec != nil {
+        return dec
+    }
+
+    if vt == reflect.TypeOf([]interface{}{}) {
+        return &sliceEfaceDecoder{}
+    }
+    if et.IsInt32() {
+        return &sliceI32Decoder{}
+    }
+    if et.IsInt64() {
+        return &sliceI64Decoder{}
+    }
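+    // Editor's note: each specialized decoder selected above and below
+    // (sliceEfaceDecoder, sliceI32Decoder, sliceU64Decoder, ...) decodes the
+    // whole array inline, avoiding the per-element decFunc dispatch that the
+    // generic sliceDecoder fallback at the end of this function has to pay.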
+ if et.IsUint32() { + return &sliceU32Decoder{} + } + if et.IsUint64() { + return &sliceU64Decoder{} + } + if et.Kind() == reflect.String { + return &sliceStringDecoder{} + } + + return &sliceDecoder{ + elemType: rt.UnpackType(vt.Elem()), + elemDec: c.compile(vt.Elem()), + typ: vt, + } +} + +func (c *compiler) compileSliceBytes(vt reflect.Type) decFunc { + ep := reflect.PtrTo(vt.Elem()) + + if ep.Implements(jsonUnmarshalerType) { + return &sliceBytesUnmarshalerDecoder{ + elemType: rt.UnpackType(vt.Elem()), + elemDec: c.compile(vt.Elem()), + typ: vt, + } + } + + if ep.Implements(encodingTextUnmarshalerType) { + return &sliceBytesUnmarshalerDecoder{ + elemType: rt.UnpackType(vt.Elem()), + elemDec: c.compile(vt.Elem()), + typ: vt, + } + } + + return &sliceBytesDecoder{} +} + +func (c *compiler) compileInterface(vt reflect.Type) decFunc { + c.enter(vt) + defer c.exit(vt) + if vt.NumMethod() == 0 { + return &efaceDecoder{} + } + + if vt.Implements(jsonUnmarshalerType) { + return &unmarshalJSONDecoder{ + typ: rt.UnpackType(vt), + } + } + + if vt.Implements(encodingTextUnmarshalerType) { + return &unmarshalTextDecoder{ + typ: rt.UnpackType(vt), + } + } + + return &ifaceDecoder{ + typ: rt.UnpackType(vt), + } +} + +func (c *compiler) compileMap(vt reflect.Type) decFunc { + c.enter(vt) + defer c.exit(vt) + // check the key unmarshaler at first + decKey := tryCompileKeyUnmarshaler(vt) + if decKey != nil { + return &mapDecoder{ + mapType: rt.MapType(rt.UnpackType(vt)), + keyDec: decKey, + elemDec: c.compile(vt.Elem()), + } + } + + // Most common map, use a decoder, to avoid function calls + if vt == reflect.TypeOf(map[string]interface{}{}) { + return &mapEfaceDecoder{} + } else if vt == reflect.TypeOf(map[string]string{}) { + return &mapStringDecoder{} + } + + // Some common integer map later + mt := rt.MapType(rt.UnpackType(vt)) + + if mt.Key.Kind() == reflect.String { + return &mapStrKeyDecoder{ + mapType: mt, + assign: rt.GetMapStrAssign(vt), + elemDec: c.compile(vt.Elem()), + } + } + + if mt.Key.IsInt64() { + return &mapI64KeyDecoder{ + mapType: mt, + elemDec: c.compile(vt.Elem()), + assign: rt.GetMap64Assign(vt), + } + } + + if mt.Key.IsInt32() { + return &mapI32KeyDecoder{ + mapType: mt, + elemDec: c.compile(vt.Elem()), + assign: rt.GetMap32Assign(vt), + } + } + + if mt.Key.IsUint64() { + return &mapU64KeyDecoder{ + mapType: mt, + elemDec: c.compile(vt.Elem()), + assign: rt.GetMap64Assign(vt), + } + } + + if mt.Key.IsUint32() { + return &mapU32KeyDecoder{ + mapType: mt, + elemDec: c.compile(vt.Elem()), + assign: rt.GetMap32Assign(vt), + } + } + + // Generic map + return &mapDecoder{ + mapType: mt, + keyDec: c.compileMapKey(vt), + elemDec: c.compile(vt.Elem()), + } +} + +func tryCompileKeyUnmarshaler(vt reflect.Type) decKey { + pt := reflect.PtrTo(vt.Key()) + + /* check for `encoding.TextUnmarshaler` with pointer receiver */ + if pt.Implements(encodingTextUnmarshalerType) { + return decodeKeyTextUnmarshaler + } + + /* not support map key with `json.Unmarshaler` */ + return nil +} + +func (c *compiler) compileMapKey(vt reflect.Type) decKey { + switch vt.Key().Kind() { + case reflect.Int8: + return decodeKeyI8 + case reflect.Int16: + return decodeKeyI16 + case reflect.Uint8: + return decodeKeyU8 + case reflect.Uint16: + return decodeKeyU16 + default: + panic(&json.UnmarshalTypeError{Type: vt}) + } +} + +// maybe vt is a named type, and not a pointer receiver, see issue 379 +func (c *compiler) tryCompilePtrUnmarshaler(vt reflect.Type, strOpt bool) decFunc { + pt := reflect.PtrTo(vt) + + /* check 
for `json.Unmarshaler` with pointer receiver */ + if pt.Implements(jsonUnmarshalerType) { + return &unmarshalJSONDecoder{ + typ: rt.UnpackType(pt), + strOpt: strOpt, + } + } + + /* check for `encoding.TextMarshaler` with pointer receiver */ + if pt.Implements(encodingTextUnmarshalerType) { + /* TextUnmarshal not support ,strig tag */ + if strOpt { + panicForInvalidStrType(vt) + } + return &unmarshalTextDecoder{ + typ: rt.UnpackType(pt), + } + } + + return nil +} + +func panicForInvalidStrType(vt reflect.Type) { + panic(error_type(rt.UnpackType(vt))) +} diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/optdec/const.go b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/const.go new file mode 100644 index 0000000000000000000000000000000000000000..77879fafee8e89e2ad1b926546f25fea36bf025a --- /dev/null +++ b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/const.go @@ -0,0 +1,60 @@ +package optdec + +import "math" + +/* +Copied from sonic-rs +// JSON Value Type +const NULL: u64 = 0; +const BOOL: u64 = 2; +const FALSE: u64 = BOOL; +const TRUE: u64 = (1 << 3) | BOOL; +const NUMBER: u64 = 3; +const UINT: u64 = NUMBER; +const SINT: u64 = (1 << 3) | NUMBER; +const REAL: u64 = (2 << 3) | NUMBER; +const RAWNUMBER: u64 = (3 << 3) | NUMBER; +const STRING: u64 = 4; +const STRING_COMMON: u64 = STRING; +const STRING_HASESCAPED: u64 = (1 << 3) | STRING; +const OBJECT: u64 = 6; +const ARRAY: u64 = 7; + +/// JSON Type Mask +const POS_MASK: u64 = (!0) << 32; +const POS_BITS: u64 = 32; +const TYPE_MASK: u64 = 0xFF; +const TYPE_BITS: u64 = 8; + +*/ + +const ( + // BasicType: 3 bits + KNull = 0 // xxxxx000 + KBool = 2 // xxxxx010 + KNumber = 3 // xxxxx011 + KString = 4 // xxxxx100 + KRaw = 5 // xxxxx101 + KObject = 6 // xxxxx110 + KArray = 7 // xxxxx111 + + // SubType: 2 bits + KFalse = (0 << 3) | KBool // xxx00_010, 2 + KTrue = (1 << 3) | KBool // xxx01_010, 10 + KUint = (0 << 3) | KNumber // xxx00_011, 3 + KSint = (1 << 3) | KNumber // xxx01_011, 11 + KReal = (2 << 3) | KNumber // xxx10_011, 19 + KRawNumber = (3 << 3) | KNumber // xxx11_011, 27 + KStringCommon = KString // xxx00_100, 4 + KStringEscaped = (1 << 3) | KString // xxx01_100, 12 +) + +const ( + PosMask = math.MaxUint64 << 32 + PosBits = 32 + TypeMask = 0xFF + TypeBits = 8 + + ConLenMask = uint64(math.MaxUint32) + ConLenBits = 32 +) diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/optdec/context.go b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/context.go new file mode 100644 index 0000000000000000000000000000000000000000..93ed9b7e09fc29f82ceea8080ee7280d40e5c657 --- /dev/null +++ b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/context.go @@ -0,0 +1,3 @@ +package optdec + +type context = Context diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/optdec/decoder.go b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/decoder.go new file mode 100644 index 0000000000000000000000000000000000000000..81eed34ea56efe20f91ef6ad22dd8785aa9f6a94 --- /dev/null +++ b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/decoder.go @@ -0,0 +1,160 @@ +package optdec + +import ( + "reflect" + "unsafe" + + "encoding/json" + "github.com/bytedance/sonic/internal/rt" + "github.com/bytedance/sonic/option" + "github.com/bytedance/sonic/internal/decoder/errors" + "github.com/bytedance/sonic/internal/decoder/consts" +) + + +type ( + MismatchTypeError = errors.MismatchTypeError + SyntaxError = errors.SyntaxError +) + +const ( + _F_allow_control = consts.F_allow_control + 
_F_copy_string = consts.F_copy_string + _F_disable_unknown = consts.F_disable_unknown + _F_disable_urc = consts.F_disable_urc + _F_use_int64 = consts.F_use_int64 + _F_use_number = consts.F_use_number + _F_validate_string = consts.F_validate_string +) + +type Options = consts.Options + +const ( + OptionUseInt64 = consts.OptionUseInt64 + OptionUseNumber = consts.OptionUseNumber + OptionUseUnicodeErrors = consts.OptionUseUnicodeErrors + OptionDisableUnknown = consts.OptionDisableUnknown + OptionCopyString = consts.OptionCopyString + OptionValidateString = consts.OptionValidateString +) + + +func Decode(s *string, i *int, f uint64, val interface{}) error { + vv := rt.UnpackEface(val) + vp := vv.Value + + /* check for nil type */ + if vv.Type == nil { + return &json.InvalidUnmarshalError{} + } + + /* must be a non-nil pointer */ + if vp == nil || vv.Type.Kind() != reflect.Ptr { + return &json.InvalidUnmarshalError{Type: vv.Type.Pack()} + } + + etp := rt.PtrElem(vv.Type) + + /* check the defined pointer type for issue 379 */ + if vv.Type.IsNamed() { + newp := vp + etp = vv.Type + vp = unsafe.Pointer(&newp) + } + + dec, err := findOrCompile(etp) + if err != nil { + return err + } + + /* parse into document */ + ctx, err := NewContext(*s, *i, uint64(f), etp) + defer ctx.Delete() + if ctx.Parser.Utf8Inv { + *s = ctx.Parser.Json + } + if err != nil { + goto fix_error; + } + err = dec.FromDom(vp, ctx.Root(), &ctx) + +fix_error: + err = fix_error(*s, *i, err) + + // update position at last + *i += ctx.Parser.Pos() + return err +} + +func fix_error(json string, pos int, err error) error { + if e, ok := err.(SyntaxError); ok { + return SyntaxError{ + Pos: int(e.Pos) + pos, + Src: json, + Msg: e.Msg, + } + } + + if e, ok := err.(MismatchTypeError); ok { + return &MismatchTypeError { + Pos: int(e.Pos) + pos, + Src: json, + Type: e.Type, + } + } + + return err +} + +// Pretouch compiles vt ahead-of-time to avoid JIT compilation on-the-fly, in +// order to reduce the first-hit latency. +// +// Opts are the compile options, for example, "option.WithCompileRecursiveDepth" is +// a compile option to set the depth of recursive compile for the nested struct type. 
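+//
+// A minimal usage sketch (editor's note: illustrative only; Payload is a
+// hypothetical caller-side type, not part of this package):
+//
+//	var payloadType = reflect.TypeOf(Payload{})
+//	if err := Pretouch(payloadType, option.WithCompileRecursiveDepth(2)); err != nil {
+//		// compilation failed; the caller may fall back to lazy
+//		// compilation on the first decode of this type
+//	}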
+func Pretouch(vt reflect.Type, opts ...option.CompileOption) error { + cfg := option.DefaultCompileOptions() + for _, opt := range opts { + opt(&cfg) + } + return pretouchRec(map[reflect.Type]bool{vt:true}, cfg) +} + +func pretouchType(_vt reflect.Type, opts option.CompileOptions) (map[reflect.Type]bool, error) { + /* compile function */ + compiler := newCompiler().apply(opts) + decoder := func(vt *rt.GoType, _ ...interface{}) (interface{}, error) { + if f, err := compiler.compileType(_vt); err != nil { + return nil, err + } else { + return f, nil + } + } + + /* find or compile */ + vt := rt.UnpackType(_vt) + if val := programCache.Get(vt); val != nil { + return nil, nil + } else if _, err := programCache.Compute(vt, decoder); err == nil { + return compiler.visited, nil + } else { + return nil, err + } +} + +func pretouchRec(vtm map[reflect.Type]bool, opts option.CompileOptions) error { + if opts.RecursiveDepth < 0 || len(vtm) == 0 { + return nil + } + next := make(map[reflect.Type]bool) + for vt := range(vtm) { + sub, err := pretouchType(vt, opts) + if err != nil { + return err + } + for svt := range(sub) { + next[svt] = true + } + } + opts.RecursiveDepth -= 1 + return pretouchRec(next, opts) +} diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/optdec/errors.go b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/errors.go new file mode 100644 index 0000000000000000000000000000000000000000..db0af547bc4cbe21af1da8f654b8d99569c83535 --- /dev/null +++ b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/errors.go @@ -0,0 +1,73 @@ +/* + * Copyright 2021 ByteDance Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + + package optdec + + import ( + "encoding/json" + "errors" + "reflect" + "strconv" + + "github.com/bytedance/sonic/internal/rt" + ) + + /** JIT Error Helpers **/ + + var stackOverflow = &json.UnsupportedValueError{ + Str: "Value nesting too deep", + Value: reflect.ValueOf("..."), + } + + func error_type(vt *rt.GoType) error { + return &json.UnmarshalTypeError{Type: vt.Pack()} + } + + func error_mismatch(node Node, ctx *context, typ reflect.Type) error { + return MismatchTypeError{ + Pos: node.Position(), + Src: ctx.Parser.Json, + Type: typ, + } + } + + func newUnmatched(pos int, vt *rt.GoType) error { + return MismatchTypeError{ + Pos: pos, + Src: "", + Type: vt.Pack(), + } + } + + func error_field(name string) error { + return errors.New("json: unknown field " + strconv.Quote(name)) + } + + func error_value(value string, vtype reflect.Type) error { + return &json.UnmarshalTypeError{ + Type: vtype, + Value: value, + } + } + + func error_syntax(pos int, src string, msg string) error { + return SyntaxError{ + Pos: pos, + Src: src, + Msg: msg, + } + } + \ No newline at end of file diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/optdec/functor.go b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/functor.go new file mode 100644 index 0000000000000000000000000000000000000000..2a0523d5ee4746763b66a58081cdf28dcd9801a1 --- /dev/null +++ b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/functor.go @@ -0,0 +1,281 @@ +package optdec + +import ( + "encoding/json" + "math" + "unsafe" + + "github.com/bytedance/sonic/internal/rt" + "github.com/bytedance/sonic/internal/resolver" +) + +type decFunc interface { + FromDom(vp unsafe.Pointer, node Node, ctx *context) error +} + +type ptrDecoder struct { + typ *rt.GoType + deref decFunc +} + +// Pointer Value is allocated in the Caller +func (d *ptrDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + *(*unsafe.Pointer)(vp) = nil + return nil + } + + if *(*unsafe.Pointer)(vp) == nil { + *(*unsafe.Pointer)(vp) = rt.Mallocgc(d.typ.Size, d.typ, true) + } + + return d.deref.FromDom(*(*unsafe.Pointer)(vp), node, ctx) +} + +type embeddedFieldPtrDecoder struct { + field resolver.FieldMeta + fieldDec decFunc + fieldName string +} + +// Pointer Value is allocated in the Caller +func (d *embeddedFieldPtrDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + return nil + } + + // seek into the pointer + vp = unsafe.Pointer(uintptr(vp) - uintptr(d.field.Path[0].Size)) + for _, f := range d.field.Path { + deref := rt.UnpackType(f.Type) + vp = unsafe.Pointer(uintptr(vp) + f.Size) + if f.Kind == resolver.F_deref { + if *(*unsafe.Pointer)(vp) == nil { + *(*unsafe.Pointer)(vp) = rt.Mallocgc(deref.Size, deref, true) + } + vp = *(*unsafe.Pointer)(vp) + } + } + return d.fieldDec.FromDom(vp, node, ctx) +} + +type i8Decoder struct{} + +func (d *i8Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + return nil + } + + ret, ok := node.AsI64(ctx) + if !ok || ret > math.MaxInt8 || ret < math.MinInt8 { + return error_mismatch(node, ctx, int8Type) + } + + *(*int8)(vp) = int8(ret) + return nil +} + +type i16Decoder struct{} + +func (d *i16Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + return nil + } + + ret, ok := node.AsI64(ctx) + if !ok || ret > math.MaxInt16 || ret < math.MinInt16 { + return error_mismatch(node, ctx, int16Type) + } + + *(*int16)(vp) = int16(ret) + return nil +} + +type 
i32Decoder struct{} + +func (d *i32Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + return nil + } + + ret, ok := node.AsI64(ctx) + if !ok || ret > math.MaxInt32 || ret < math.MinInt32 { + return error_mismatch(node, ctx, int32Type) + } + + *(*int32)(vp) = int32(ret) + return nil +} + +type i64Decoder struct{} + +func (d *i64Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + return nil + } + + ret, ok := node.AsI64(ctx) + if !ok { + return error_mismatch(node, ctx, int64Type) + } + + *(*int64)(vp) = int64(ret) + return nil +} + +type u8Decoder struct{} + +func (d *u8Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + return nil + } + + ret, ok := node.AsU64(ctx) + if !ok || ret > math.MaxUint8 { + err := error_mismatch(node, ctx, uint8Type) + return err + } + + *(*uint8)(vp) = uint8(ret) + return nil +} + +type u16Decoder struct{} + +func (d *u16Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + return nil + } + + ret, ok := node.AsU64(ctx) + if !ok || ret > math.MaxUint16 { + return error_mismatch(node, ctx, uint16Type) + } + *(*uint16)(vp) = uint16(ret) + return nil +} + +type u32Decoder struct{} + +func (d *u32Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + return nil + } + + ret, ok := node.AsU64(ctx) + if !ok || ret > math.MaxUint32 { + return error_mismatch(node, ctx, uint32Type) + } + + *(*uint32)(vp) = uint32(ret) + return nil +} + +type u64Decoder struct{} + +func (d *u64Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + return nil + } + + ret, ok := node.AsU64(ctx) + if !ok { + return error_mismatch(node, ctx, uint64Type) + } + + *(*uint64)(vp) = uint64(ret) + return nil +} + +type f32Decoder struct{} + +func (d *f32Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + return nil + } + + ret, ok := node.AsF64(ctx) + if !ok || ret > math.MaxFloat32 || ret < -math.MaxFloat32 { + return error_mismatch(node, ctx, float32Type) + } + + *(*float32)(vp) = float32(ret) + return nil +} + +type f64Decoder struct{} + +func (d *f64Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + return nil + } + + ret, ok := node.AsF64(ctx) + if !ok { + return error_mismatch(node, ctx, float64Type) + } + + *(*float64)(vp) = float64(ret) + return nil +} + +type boolDecoder struct { +} + +func (d *boolDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + return nil + } + + ret, ok := node.AsBool() + if !ok { + return error_mismatch(node, ctx, boolType) + } + + *(*bool)(vp) = bool(ret) + return nil +} + +type stringDecoder struct { +} + +func (d *stringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + return nil + } + + ret, ok := node.AsStr(ctx) + if !ok { + return error_mismatch(node, ctx, stringType) + } + *(*string)(vp) = ret + return nil +} + +type numberDecoder struct { +} + +func (d *numberDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + return nil + } + + num, ok := node.AsNumber(ctx) + if !ok { + return error_mismatch(node, ctx, jsonNumberType) + } + *(*json.Number)(vp) = num + return nil +} + +type recuriveDecoder struct { + typ *rt.GoType +} + +func (d *recuriveDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + dec, err := 
findOrCompile(d.typ)
+    if err != nil {
+        return err
+    }
+    return dec.FromDom(vp, node, ctx)
+}
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/optdec/helper.go b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/helper.go
new file mode 100644
index 0000000000000000000000000000000000000000..143fa6708ab79ec6117b03f2f6924200376d1f4a
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/helper.go
@@ -0,0 +1,105 @@
+package optdec
+
+import (
+    "encoding/json"
+    "strconv"
+
+    "github.com/bytedance/sonic/internal/native"
+    "github.com/bytedance/sonic/internal/utils"
+    "github.com/bytedance/sonic/internal/native/types"
+)
+
+
+func SkipNumberFast(json string, start int) (int, error) {
+    // find the end of the number; it was already parsed in native code, so it is always valid here
+    pos := start
+    for pos < len(json) && json[pos] != ']' && json[pos] != '}' && json[pos] != ',' {
+        if json[pos] >= '0' && json[pos] <= '9' || json[pos] == '.' || json[pos] == '-' || json[pos] == '+' || json[pos] == 'e' || json[pos] == 'E' {
+            pos += 1
+        } else {
+            break
+        }
+    }
+    return pos, nil
+}
+
+
+func isSpace(c byte) bool {
+    return c == ' ' || c == '\t' || c == '\n' || c == '\r'
+}
+
+// raw should start at the beginning of the number
+func ValidNumberFast(raw string) bool {
+    ret := utils.SkipNumber(raw, 0)
+    if ret < 0 {
+        return false
+    }
+
+    // reject trailing chars after the number
+    for ret < len(raw) {
+        return false
+    }
+
+    return true
+}
+
+func SkipOneFast2(json string, pos *int) (int, error) {
+    // skip one JSON value; it was already parsed in the native (sonic-cpp) parser, so it is always valid here
+    start := native.SkipOneFast(&json, pos)
+    if start < 0 {
+        return -1, error_syntax(*pos, json, types.ParsingError(-start).Error())
+    }
+    return start, nil
+}
+
+func SkipOneFast(json string, pos int) (string, error) {
+    // skip one JSON value; it was already parsed in the native (sonic-cpp) parser, so it is always valid here
+    start := native.SkipOneFast(&json, &pos)
+    if start < 0 {
+        // TODO: return a more detailed error code
+        return "", error_syntax(pos, json, types.ParsingError(-start).Error())
+    }
+    return json[start:pos], nil
+}
+
+func ParseI64(raw string) (int64, error) {
+    i64, err := strconv.ParseInt(raw, 10, 64)
+    if err != nil {
+        return 0, err
+    }
+    return i64, nil
+}
+
+func ParseBool(raw string) (bool, error) {
+    var b bool
+    err := json.Unmarshal([]byte(raw), &b)
+    if err != nil {
+        return false, err
+    }
+    return b, nil
+}
+
+func ParseU64(raw string) (uint64, error) {
+    u64, err := strconv.ParseUint(raw, 10, 64)
+    if err != nil {
+        return 0, err
+    }
+    return u64, nil
+}
+
+func ParseF64(raw string) (float64, error) {
+    f64, err := strconv.ParseFloat(raw, 64)
+    if err != nil {
+        return 0, err
+    }
+    return f64, nil
+}
+
+func Unquote(raw string) (string, error) {
+    var u string
+    err := json.Unmarshal([]byte(raw), &u)
+    if err != nil {
+        return "", err
+    }
+    return u, nil
+}
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/optdec/interface.go b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/interface.go
new file mode 100644
index 0000000000000000000000000000000000000000..0c063d55f6aee7fbcfcf5cae93c4ba6fe88e158d
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/interface.go
@@ -0,0 +1,169 @@
+package optdec
+
+import (
+    "encoding"
+    "encoding/json"
+    "unsafe"
+    "reflect"
+
+    "github.com/bytedance/sonic/internal/rt"
+)
+
+type efaceDecoder struct {
+}
+
+func (d *efaceDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+    if node.IsNull() {
+        *(*interface{})(vp) = interface{}(nil)
+        return nil
+    }
+
+    eface := *(*rt.GoEface)(vp)
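+    // Editor's note: mirroring encoding/json semantics, if the interface
+    // already holds a non-nil concrete pointer (e.g. it was pre-seeded with a
+    // *T), the value is decoded in place through that pointer; otherwise the
+    // checks below build a fresh value via AsEface.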
+ + // not pointer type, or nil pointer, or *interface{} + if eface.Value == nil || eface.Type.Kind() != reflect.Ptr || rt.PtrElem(eface.Type) == anyType { + ret, err := node.AsEface(ctx) + if err != nil { + return err + } + + *(*interface{})(vp) = ret + return nil + } + + etp := rt.PtrElem(eface.Type) + vp = eface.Value + + /* check the defined pointer type for issue 379 */ + if eface.Type.IsNamed() { + newp := vp + etp = eface.Type + vp = unsafe.Pointer(&newp) + } + + dec, err := findOrCompile(etp) + if err != nil { + return err + } + + return dec.FromDom(vp, node, ctx) +} + +type ifaceDecoder struct { + typ *rt.GoType +} + +func (d *ifaceDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + *(*unsafe.Pointer)(vp) = nil + return nil + } + + iface := *(*rt.GoIface)(vp) + if iface.Itab == nil { + return error_type(d.typ) + } + + vt := iface.Itab.Vt + + // not pointer type, or nil pointer, or *interface{} + if vp == nil || vt.Kind() != reflect.Ptr || rt.PtrElem(vt) == anyType { + ret, err := node.AsEface(ctx) + if err != nil { + return err + } + + *(*interface{})(vp) = ret + return nil + } + + + etp := rt.PtrElem(vt) + vp = iface.Value + + /* check the defined pointer type for issue 379 */ + if vt.IsNamed() { + newp := vp + etp = vt + vp = unsafe.Pointer(&newp) + } + + dec, err := findOrCompile(etp) + if err != nil { + return err + } + + return dec.FromDom(vp, node, ctx) +} + +type unmarshalTextDecoder struct { + typ *rt.GoType +} + +func (d *unmarshalTextDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + *(*unsafe.Pointer)(vp) = nil + return nil + } + + txt, ok := node.AsStringText(ctx) + if !ok { + return error_mismatch(node, ctx, d.typ.Pack()) + } + + v := *(*interface{})(unsafe.Pointer(&rt.GoEface{ + Type: d.typ, + Value: vp, + })) + + // fast path + if u, ok := v.(encoding.TextUnmarshaler); ok { + return u.UnmarshalText(txt) + } + + // slow path + rv := reflect.ValueOf(v) + if u, ok := rv.Interface().(encoding.TextUnmarshaler); ok { + return u.UnmarshalText(txt) + } + + return error_type(d.typ) +} + +type unmarshalJSONDecoder struct { + typ *rt.GoType + strOpt bool +} + +func (d *unmarshalJSONDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + v := *(*interface{})(unsafe.Pointer(&rt.GoEface{ + Type: d.typ, + Value: vp, + })) + + var input []byte + if d.strOpt && node.IsNull() { + input = []byte("null") + } else if d.strOpt { + s, ok := node.AsStringText(ctx) + if !ok { + return error_mismatch(node, ctx, d.typ.Pack()) + } + input = s + } else { + input = []byte(node.AsRaw(ctx)) + } + + // fast path + if u, ok := v.(json.Unmarshaler); ok { + return u.UnmarshalJSON((input)) + } + + // slow path + rv := reflect.ValueOf(v) + if u, ok := rv.Interface().(json.Unmarshaler); ok { + return u.UnmarshalJSON(input) + } + + return error_type(d.typ) +} diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/optdec/map.go b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/map.go new file mode 100644 index 0000000000000000000000000000000000000000..1a2bda8f3fad5d0a71d3824b7d7abaac64ece3c7 --- /dev/null +++ b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/map.go @@ -0,0 +1,430 @@ +package optdec + +import ( + "encoding" + "encoding/json" + "math" + "reflect" + "unsafe" + + "github.com/bytedance/sonic/internal/rt" +) + +/** Decoder for most common map types: map[string]interface{}, map[string]string **/ + +type mapEfaceDecoder struct { +} + +func (d *mapEfaceDecoder) FromDom(vp 
unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + *(*map[string]interface{})(vp) = nil + return nil + } + + return node.AsMapEface(ctx, vp) +} + +type mapStringDecoder struct { +} + +func (d *mapStringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + *(*map[string]string)(vp) = nil + return nil + } + + return node.AsMapString(ctx, vp) +} + +/** Decoder for map with string key **/ + +type mapStrKeyDecoder struct { + mapType *rt.GoMapType + elemDec decFunc + assign rt.MapStrAssign + typ reflect.Type +} + +func (d *mapStrKeyDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + *(*unsafe.Pointer)(vp) = nil + return nil + } + + obj, ok := node.AsObj() + if !ok { + return error_mismatch(node, ctx, d.mapType.Pack()) + } + + // allocate map + m := *(*unsafe.Pointer)(vp) + if m == nil { + m = rt.Makemap(&d.mapType.GoType, obj.Len()) + } + + var gerr error + next := obj.Children() + for i := 0; i < obj.Len(); i++ { + keyn := NewNode(next) + key, _ := keyn.AsStr(ctx) + + valn := NewNode(PtrOffset(next, 1)) + valp := d.assign(d.mapType, m, key) + err := d.elemDec.FromDom(valp, valn, ctx) + if gerr == nil && err != nil { + gerr = err + } + next = valn.Next() + } + + *(*unsafe.Pointer)(vp) = m + return gerr +} + +/** Decoder for map with int32 or int64 key **/ + +type mapI32KeyDecoder struct { + mapType *rt.GoMapType + elemDec decFunc + assign rt.Map32Assign +} + +func (d *mapI32KeyDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + *(*unsafe.Pointer)(vp) = nil + return nil + } + + obj, ok := node.AsObj() + if !ok { + return error_mismatch(node, ctx, d.mapType.Pack()) + } + + // allocate map + m := *(*unsafe.Pointer)(vp) + if m == nil { + m = rt.Makemap(&d.mapType.GoType, obj.Len()) + } + + next := obj.Children() + var gerr error + for i := 0; i < obj.Len(); i++ { + keyn := NewNode(next) + k, ok := keyn.ParseI64(ctx) + if !ok || k > math.MaxInt32 || k < math.MinInt32 { + if gerr == nil { + gerr = error_mismatch(keyn, ctx, d.mapType.Pack()) + } + valn := NewNode(PtrOffset(next, 1)) + next = valn.Next() + continue + } + + key := int32(k) + ku32 := *(*uint32)(unsafe.Pointer(&key)) + valn := NewNode(PtrOffset(next, 1)) + valp := d.assign(d.mapType, m, ku32) + err := d.elemDec.FromDom(valp, valn, ctx) + if gerr == nil && err != nil { + gerr = err + } + + next = valn.Next() + } + + *(*unsafe.Pointer)(vp) = m + return gerr +} + +type mapI64KeyDecoder struct { + mapType *rt.GoMapType + elemDec decFunc + assign rt.Map64Assign +} + +func (d *mapI64KeyDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + *(*unsafe.Pointer)(vp) = nil + return nil + } + + obj, ok := node.AsObj() + if !ok { + return error_mismatch(node, ctx, d.mapType.Pack()) + } + + // allocate map + m := *(*unsafe.Pointer)(vp) + if m == nil { + m = rt.Makemap(&d.mapType.GoType, obj.Len()) + } + + var gerr error + next := obj.Children() + for i := 0; i < obj.Len(); i++ { + keyn := NewNode(next) + key, ok := keyn.ParseI64(ctx) + + if !ok { + if gerr == nil { + gerr = error_mismatch(keyn, ctx, d.mapType.Pack()) + } + valn := NewNode(PtrOffset(next, 1)) + next = valn.Next() + continue + } + + ku64 := *(*uint64)(unsafe.Pointer(&key)) + valn := NewNode(PtrOffset(next, 1)) + valp := d.assign(d.mapType, m, ku64) + err := d.elemDec.FromDom(valp, valn, ctx) + if gerr == nil && err != nil { + gerr = err + } + next = valn.Next() + } + + *(*unsafe.Pointer)(vp) = m + return gerr +} + 
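+// Editor's note on the iteration pattern shared by the map decoders in this
+// file: object children are laid out flat in the node buffer as key0, value0,
+// key1, value1, ...; PtrOffset(next, 1) steps from a key node to its value
+// node, and valn.Next() skips the whole value (including nested containers)
+// to land on the next key.
+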
+/** Decoder for map with uint32 or uint64 key **/
+
+type mapU32KeyDecoder struct {
+    mapType *rt.GoMapType
+    elemDec decFunc
+    assign  rt.Map32Assign
+}
+
+func (d *mapU32KeyDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+    if node.IsNull() {
+        *(*unsafe.Pointer)(vp) = nil
+        return nil
+    }
+
+    obj, ok := node.AsObj()
+    if !ok {
+        return error_mismatch(node, ctx, d.mapType.Pack())
+    }
+
+    // allocate map
+    m := *(*unsafe.Pointer)(vp)
+    if m == nil {
+        m = rt.Makemap(&d.mapType.GoType, obj.Len())
+    }
+
+    var gerr error
+    next := obj.Children()
+    for i := 0; i < obj.Len(); i++ {
+        keyn := NewNode(next)
+        k, ok := keyn.ParseU64(ctx)
+        if !ok || k > math.MaxUint32 {
+            if gerr == nil {
+                gerr = error_mismatch(keyn, ctx, d.mapType.Pack())
+            }
+            valn := NewNode(PtrOffset(next, 1))
+            next = valn.Next()
+            continue
+        }
+
+        key := uint32(k)
+        valn := NewNode(PtrOffset(next, 1))
+        valp := d.assign(d.mapType, m, key)
+        err := d.elemDec.FromDom(valp, valn, ctx)
+        if gerr == nil && err != nil {
+            gerr = err
+        }
+        next = valn.Next()
+    }
+
+    *(*unsafe.Pointer)(vp) = m
+    return gerr
+}
+
+type mapU64KeyDecoder struct {
+    mapType *rt.GoMapType
+    elemDec decFunc
+    assign  rt.Map64Assign
+}
+
+func (d *mapU64KeyDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+    if node.IsNull() {
+        *(*unsafe.Pointer)(vp) = nil
+        return nil
+    }
+
+    obj, ok := node.AsObj()
+    if !ok {
+        return error_mismatch(node, ctx, d.mapType.Pack())
+    }
+    // allocate map
+    m := *(*unsafe.Pointer)(vp)
+    if m == nil {
+        m = rt.Makemap(&d.mapType.GoType, obj.Len())
+    }
+
+    var gerr error
+    next := obj.Children()
+    for i := 0; i < obj.Len(); i++ {
+        keyn := NewNode(next)
+        key, ok := keyn.ParseU64(ctx)
+        if !ok {
+            if gerr == nil {
+                gerr = error_mismatch(keyn, ctx, d.mapType.Pack())
+            }
+            valn := NewNode(PtrOffset(next, 1))
+            next = valn.Next()
+            continue
+        }
+
+        valn := NewNode(PtrOffset(next, 1))
+        valp := d.assign(d.mapType, m, key)
+        err := d.elemDec.FromDom(valp, valn, ctx)
+        if gerr == nil && err != nil {
+            gerr = err
+        }
+        next = valn.Next()
+    }
+
+    *(*unsafe.Pointer)(vp) = m
+    return gerr
+}
+
+/** Decoder for generic cases */
+
+type decKey func(dec *mapDecoder, raw string, ctx *context) (interface{}, error)
+
+func decodeKeyU8(dec *mapDecoder, raw string, ctx *context) (interface{}, error) {
+    key, err := Unquote(raw)
+    if err != nil {
+        return nil, err
+    }
+    ret, err := ParseU64(key)
+    if err != nil {
+        return nil, err
+    }
+    if ret > math.MaxUint8 {
+        return nil, error_value(key, dec.mapType.Key.Pack())
+    }
+    return uint8(ret), nil
+}
+
+func decodeKeyU16(dec *mapDecoder, raw string, ctx *context) (interface{}, error) {
+    key, err := Unquote(raw)
+    if err != nil {
+        return nil, err
+    }
+    ret, err := ParseU64(key)
+    if err != nil {
+        return nil, err
+    }
+    if ret > math.MaxUint16 {
+        return nil, error_value(key, dec.mapType.Key.Pack())
+    }
+    return uint16(ret), nil
+}
+
+func decodeKeyI8(dec *mapDecoder, raw string, ctx *context) (interface{}, error) {
+    key, err := Unquote(raw)
+    if err != nil {
+        return nil, err
+    }
+    ret, err := ParseI64(key)
+    if err != nil {
+        return nil, err
+    }
+    if ret > math.MaxInt8 || ret < math.MinInt8 {
+        return nil, error_value(key, dec.mapType.Key.Pack())
+    }
+    return int8(ret), nil
+}
+
+func decodeKeyI16(dec *mapDecoder, raw string, ctx *context) (interface{}, error) {
+    key, err := Unquote(raw)
+    if err != nil {
+        return nil, err
+    }
+    ret, err := ParseI64(key)
+    if err != nil {
+        return nil, err
+    }
+    if ret > math.MaxInt16 || ret < math.MinInt16 {
+        return nil,
error_value(key, dec.mapType.Key.Pack()) + } + return int16(ret), nil +} + +func decodeKeyJSONUnmarshaler(dec *mapDecoder, raw string, _ *context) (interface{}, error) { + ret := reflect.New(dec.mapType.Key.Pack()).Interface() + err := ret.(json.Unmarshaler).UnmarshalJSON([]byte(raw)) + if err != nil { + return nil, err + } + return ret, nil +} + +func decodeKeyTextUnmarshaler(dec *mapDecoder, raw string, ctx *context) (interface{}, error) { + key, err := Unquote(raw) + if err != nil { + return nil, err + } + ret := reflect.New(dec.mapType.Key.Pack()).Interface() + err = ret.(encoding.TextUnmarshaler).UnmarshalText([]byte(key)) + if err != nil { + return nil, err + } + return ret, nil +} + +type mapDecoder struct { + mapType *rt.GoMapType + keyDec decKey + elemDec decFunc +} + +func (d *mapDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + *(*unsafe.Pointer)(vp) = nil + return nil + } + + obj, ok := node.AsObj() + if !ok { + return error_mismatch(node, ctx, d.mapType.Pack()) + } + + // allocate map + m := *(*unsafe.Pointer)(vp) + if m == nil { + m = rt.Makemap(&d.mapType.GoType, obj.Len()) + } + + next := obj.Children() + var gerr error + for i := 0; i < obj.Len(); i++ { + keyn := NewNode(next) + raw := keyn.AsRaw(ctx) + key, err := d.keyDec(d, raw, ctx) + if err != nil { + if gerr == nil { + gerr = error_mismatch(keyn, ctx, d.mapType.Pack()) + } + valn := NewNode(PtrOffset(next, 1)) + next = valn.Next() + continue + } + + valn := NewNode(PtrOffset(next, 1)) + keyp := rt.UnpackEface(key).Value + valp := rt.Mapassign(d.mapType, m, keyp) + err = d.elemDec.FromDom(valp, valn, ctx) + if gerr == nil && err != nil { + gerr = err + } + + next = valn.Next() + } + + *(*unsafe.Pointer)(vp) = m + return gerr +} diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/optdec/native.go b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/native.go new file mode 100644 index 0000000000000000000000000000000000000000..29a0136ae4c30e527d7167cb4081ac235bcf5eae --- /dev/null +++ b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/native.go @@ -0,0 +1,269 @@ +package optdec + +import ( + "fmt" + "reflect" + "unsafe" + + "sync" + + "github.com/bytedance/sonic/internal/native" + "github.com/bytedance/sonic/internal/native/types" + "github.com/bytedance/sonic/internal/rt" + "github.com/bytedance/sonic/utf8" +) + + +type ErrorCode int + +const ( + SONIC_OK = 0; + SONIC_CONTROL_CHAR = 1; + SONIC_INVALID_ESCAPED = 2; + SONIC_INVALID_NUM = 3; + SONIC_FLOAT_INF = 4; + SONIC_EOF = 5; + SONIC_INVALID_CHAR = 6; + SONIC_EXPECT_KEY = 7; + SONIC_EXPECT_COLON = 8; + SONIC_EXPECT_OBJ_COMMA_OR_END = 9; + SONIC_EXPECT_ARR_COMMA_OR_END = 10; + SONIC_VISIT_FAILED = 11; + SONIC_INVALID_ESCAPED_UTF = 12; + SONIC_INVALID_LITERAL = 13; + SONIC_STACK_OVERFLOW = 14; +) + +var ParsingErrors = []string{ + SONIC_OK : "ok", + SONIC_CONTROL_CHAR : "control chars in string", + SONIC_INVALID_ESCAPED : "invalid escaped chars in string", + SONIC_INVALID_NUM : "invalid number", + SONIC_FLOAT_INF : "float infinity", + SONIC_EOF : "eof", + SONIC_INVALID_CHAR : "invalid chars", + SONIC_EXPECT_KEY : "expect a json key", + SONIC_EXPECT_COLON : "expect a `:`", + SONIC_EXPECT_OBJ_COMMA_OR_END : "expect a `,` or `}`", + SONIC_EXPECT_ARR_COMMA_OR_END : "expect a `,` or `]`", + SONIC_VISIT_FAILED : "failed in json visitor", + SONIC_INVALID_ESCAPED_UTF : "invalid escaped unicodes", + SONIC_INVALID_LITERAL : "invalid literal(true/false/null)", + SONIC_STACK_OVERFLOW : "json is exceeded max 
depth 4096, cause stack overflow", +} + +func (code ErrorCode) Error() string { + return ParsingErrors[code] +} + +type node struct { + typ uint64 + val uint64 +} + +// should consitent with native/parser.c +type _nospaceBlock struct { + _ [8]byte + _ [8]byte +} + +// should consitent with native/parser.c +type nodeBuf struct { + ncur uintptr + parent int64 + depth uint64 + nstart uintptr + nend uintptr + stat jsonStat +} + +func (self *nodeBuf) init(nodes []node) { + self.ncur = uintptr(unsafe.Pointer(&nodes[0])) + self.nstart = self.ncur + self.nend = self.ncur + uintptr(cap(nodes)) * unsafe.Sizeof(node{}) + self.parent = -1 +} + +// should consitent with native/parser.c +type Parser struct { + Json string + padded []byte + nodes []node + dbuf []byte + backup []node + + options uint64 + // JSON cursor + start uintptr + cur uintptr + end uintptr + _nbk _nospaceBlock + + // node buffer cursor + nbuf nodeBuf + Utf8Inv bool + isEface bool +} + +// only when parse non-empty object/array are needed. +type jsonStat struct { + object uint32 + array uint32 + str uint32 + number uint32 + array_elems uint32 + object_keys uint32 + max_depth uint32 +} + + +var ( + defaultJsonPaddedCap uintptr = 1 << 20 // 1 Mb + defaultNodesCap uintptr = (1 << 20) / unsafe.Sizeof(node{}) // 1 Mb +) + +var parsePool sync.Pool = sync.Pool { + New: func () interface{} { + return &Parser{ + options: 0, + padded: make([]byte, 0, defaultJsonPaddedCap), + nodes: make([]node, defaultNodesCap, defaultNodesCap), + dbuf: make([]byte, types.MaxDigitNums, types.MaxDigitNums), + } + }, +} + +var padding string = "x\"x\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + +func newParser(data string, pos int, opt uint64) *Parser { + p := parsePool.Get().(*Parser) + + /* validate json if needed */ + if (opt & (1 << _F_validate_string)) != 0 && !utf8.ValidateString(data){ + dbuf := utf8.CorrectWith(nil, rt.Str2Mem(data[pos:]), "\ufffd") + dbuf = append(dbuf, padding...) + p.Json = rt.Mem2Str(dbuf[:len(dbuf) - len(padding)]) + p.Utf8Inv = true + p.start = uintptr((*rt.GoString)(unsafe.Pointer(&p.Json)).Ptr) + } else { + p.Json = data + // TODO: prevent too large JSON + p.padded = append(p.padded, data[pos:]...) + p.padded = append(p.padded, padding...) 
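+        // Editor's note: the input is copied into p.padded followed by the
+        // fixed `x"x` + NUL tail above, so the native scanner can read a
+        // bounded number of bytes past the logical end of the JSON without
+        // extra checks; p.Json still refers to the unpadded input, while
+        // p.start (set just below) points into the padded copy.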
+ p.start = uintptr((*rt.GoSlice)(unsafe.Pointer(&p.padded)).Ptr) + } + + p.cur = p.start + p.end = p.cur + uintptr(len(p.Json)) + p.options = opt + p.nbuf.init(p.nodes) + return p +} + + +func (p *Parser) Pos() int { + return int(p.cur - p.start) +} + +func (p *Parser) JsonBytes() []byte { + if p.Utf8Inv { + return (rt.Str2Mem(p.Json)) + } else { + return p.padded + } +} + +var nodeType = rt.UnpackType(reflect.TypeOf(node{})) + +//go:inline +func calMaxNodeCap(jsonSize int) int { + return jsonSize / 2 + 2 +} + +func (p *Parser) parse() ErrorCode { + // when decode into struct, we should decode number as possible + old := p.options + if !p.isEface { + p.options &^= 1 << _F_use_number + } + + // fast path with limited node buffer + err := ErrorCode(native.ParseWithPadding(unsafe.Pointer(p))) + if err != SONIC_VISIT_FAILED { + p.options = old + return err + } + + // check OoB here + offset := p.nbuf.ncur - p.nbuf.nstart + curLen := offset / unsafe.Sizeof(node{}) + if curLen != uintptr(len(p.nodes)) { + panic(fmt.Sprintf("current len: %d, real len: %d cap: %d", curLen, len(p.nodes), cap(p.nodes))) + } + + // node buf is not enough, continue parse + // the maxCap is always meet all valid JSON + maxCap := calMaxNodeCap(len(p.Json)) + slice := rt.GoSlice{ + Ptr: rt.Mallocgc(uintptr(maxCap) * nodeType.Size, nodeType, false), + Len: maxCap, + Cap: maxCap, + } + rt.Memmove(unsafe.Pointer(slice.Ptr), unsafe.Pointer(&p.nodes[0]), offset) + p.backup = p.nodes + p.nodes = *(*[]node)(unsafe.Pointer(&slice)) + + // update node cursor + p.nbuf.nstart = uintptr(unsafe.Pointer(&p.nodes[0])) + p.nbuf.nend = p.nbuf.nstart + uintptr(cap(p.nodes)) * unsafe.Sizeof(node{}) + p.nbuf.ncur = p.nbuf.nstart + offset + + // continue parse json + err = ErrorCode(native.ParseWithPadding(unsafe.Pointer(p))) + p.options = old + return err +} + +func (p *Parser) reset() { + p.options = 0 + p.padded = p.padded[:0] + // nodes is too large here, we will not reset it and use small backup nodes buffer + if p.backup != nil { + p.nodes = p.backup + p.backup = nil + } + p.start = 0 + p.cur = 0 + p.end = 0 + p.Json = "" + p.nbuf = nodeBuf{} + p._nbk = _nospaceBlock{} + p.Utf8Inv = false + p.isEface = false +} + +func (p *Parser) free() { + p.reset() + parsePool.Put(p) +} + +//go:noinline +func (p *Parser) fixError(code ErrorCode) error { + if code == SONIC_OK { + return nil + } + + if p.Pos() == 0 { + code = SONIC_EOF; + } + + pos := p.Pos() - 1 + return error_syntax(pos, p.Json, ParsingErrors[code]) +} + +func Parse(data string, opt uint64) error { + p := newParser(data, 0, opt) + err := p.parse() + p.free() + return err +} diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/optdec/node.go b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/node.go new file mode 100644 index 0000000000000000000000000000000000000000..690fbd5d4ac10c246c52f1f2279d235b508e90cb --- /dev/null +++ b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/node.go @@ -0,0 +1,1278 @@ +package optdec + +import ( + "encoding/json" + "math" + "unsafe" + + "github.com/bytedance/sonic/internal/envs" + "github.com/bytedance/sonic/internal/rt" +) + +type Context struct { + Parser *Parser + efacePool *efacePool + Stack bounedStack + Utf8Inv bool +} + +func (ctx *Context) Options() uint64 { + return ctx.Parser.options +} + +/************************* Stack and Pool Helper *******************/ + +type parentStat struct { + con unsafe.Pointer + remain uint64 +} +type bounedStack struct { + stack []parentStat + index int +} + +func newStack(size 
int) bounedStack { + return bounedStack{ + stack: make([]parentStat, size + 2), + index: 0, + } +} + +//go:nosplit +func (s *bounedStack) Pop() (unsafe.Pointer, int, bool){ + s.index-- + con := s.stack[s.index].con + remain := s.stack[s.index].remain &^ (uint64(1) << 63) + isObj := (s.stack[s.index].remain & (uint64(1) << 63)) != 0 + s.stack[s.index].con = nil + s.stack[s.index].remain = 0 + return con, int(remain), isObj +} + +//go:nosplit +func (s *bounedStack) Push(p unsafe.Pointer, remain int, isObj bool) { + s.stack[s.index].con = p + s.stack[s.index].remain = uint64(remain) + if isObj { + s.stack[s.index].remain |= (uint64(1) << 63) + } + s.index++ +} + +type efacePool struct{ + t64 rt.T64Pool + tslice rt.TslicePool + tstring rt.TstringPool + efaceSlice rt.SlicePool +} + +func newEfacePool(stat *jsonStat, useNumber bool) *efacePool { + strs := int(stat.str) + nums := 0 + if useNumber { + strs += int(stat.number) + } else { + nums = int(stat.number) + } + + return &efacePool{ + t64: rt.NewT64Pool(nums), + tslice: rt.NewTslicePool(int(stat.array)), + tstring: rt.NewTstringPool(strs), + efaceSlice: rt.NewPool(rt.AnyType, int(stat.array_elems)), + } +} + +func (self *efacePool) GetMap(hint int) unsafe.Pointer { + m := make(map[string]interface{}, hint) + return *(*unsafe.Pointer)(unsafe.Pointer(&m)) +} + +func (self *efacePool) GetSlice(hint int) unsafe.Pointer { + return unsafe.Pointer(self.efaceSlice.GetSlice(hint)) +} + +func (self *efacePool) ConvTSlice(val rt.GoSlice, typ *rt.GoType, dst unsafe.Pointer) { + self.tslice.Conv(val, typ, (*interface{})(dst)) +} + +func (self *efacePool) ConvF64(val float64, dst unsafe.Pointer) { + self.t64.Conv(castU64(val), rt.Float64Type, (*interface{})(dst)) +} + +func (self *efacePool) ConvTstring(val string, dst unsafe.Pointer) { + self.tstring.Conv(val, (*interface{})(dst)) +} + +func (self *efacePool) ConvTnum(val json.Number, dst unsafe.Pointer) { + self.tstring.ConvNum(val, (*interface{})(dst)) +} + +/********************************************************/ + +func canUseFastMap( opts uint64, root *rt.GoType) bool { + return envs.UseFastMap && (opts & (1 << _F_copy_string)) == 0 && (opts & (1 << _F_use_int64)) == 0 && (root == rt.AnyType || root == rt.MapEfaceType || root == rt.SliceEfaceType) +} + +func NewContext(json string, pos int, opts uint64, root *rt.GoType) (Context, error) { + ctx := Context{ + Parser: newParser(json, pos, opts), + } + if root == rt.AnyType || root == rt.MapEfaceType || root == rt.SliceEfaceType { + ctx.Parser.isEface = true + } + + ecode := ctx.Parser.parse() + + if ecode != 0 { + return ctx, ctx.Parser.fixError(ecode) + } + + useNumber := (opts & (1 << _F_use_number )) != 0 + if canUseFastMap(opts, root) { + ctx.efacePool = newEfacePool(&ctx.Parser.nbuf.stat, useNumber) + ctx.Stack = newStack(int(ctx.Parser.nbuf.stat.max_depth)) + } + + return ctx, nil +} + +func (ctx *Context) Delete() { + ctx.Parser.free() + ctx.Parser = nil +} + +type Node struct { + cptr uintptr +} + +func NewNode(cptr uintptr) Node { + return Node{cptr: cptr} +} + +type Dom struct { + cdom uintptr +} + +func (ctx *Context) Root() Node { + root := (uintptr)(((*rt.GoSlice)(unsafe.Pointer(&ctx.Parser.nodes))).Ptr) + return Node{cptr: root} +} + +type Array struct { + cptr uintptr +} + +type Object struct { + cptr uintptr +} + +func (obj Object) Len() int { + cobj := ptrCast(obj.cptr) + return int(uint64(cobj.val) & ConLenMask) +} + +func (arr Array) Len() int { + carr := ptrCast(arr.cptr) + return int(uint64(carr.val) & ConLenMask) +} + +// / 
Helper functions to eliminate CGO calls +func (val Node) Type() uint8 { + ctype := ptrCast(val.cptr) + return uint8(ctype.typ & TypeMask) +} + +func (val Node) Next() uintptr { + if val.Type() != KObject && val.Type() != KArray { + return PtrOffset(val.cptr, 1) + } + cobj := ptrCast(val.cptr) + offset := int64(uint64(cobj.val) >> ConLenBits) + return PtrOffset(val.cptr, offset) +} + +func (val *Node) next() { + *val = NewNode(val.Next()) +} + +type NodeIter struct { + next uintptr +} + +func NewNodeIter(node Node) NodeIter { + return NodeIter{next: node.cptr} +} + +func (iter *NodeIter) Next() Node { + ret := NewNode(iter.next) + iter.next = PtrOffset(iter.next, 1) + return ret +} + + +func (iter *NodeIter) Peek() Node { + return NewNode(iter.next) +} + +func (val Node) U64() uint64 { + cnum := ptrCast(val.cptr) + return *(*uint64)((unsafe.Pointer)(&(cnum.val))) +} + +func (val Node) I64() int64 { + cnum := ptrCast(val.cptr) + return *(*int64)((unsafe.Pointer)(&(cnum.val))) +} + +func (val Node) IsNull() bool { + return val.Type() == KNull +} + +func (val Node) IsNumber() bool { + return val.Type() & KNumber != 0 +} + +func (val Node) F64() float64 { + cnum := ptrCast(val.cptr) + return *(*float64)((unsafe.Pointer)(&(cnum.val))) +} + +func (val Node) Bool() bool { + return val.Type() == KTrue +} + +func (self Node) AsU64(ctx *Context) (uint64, bool) { + if self.Type() == KUint { + return self.U64(), true + } else if self.Type() == KRawNumber { + num, err := ParseU64(self.Raw(ctx)) + if err != nil { + return 0, false + } + return num, true + } else { + return 0, false + } +} + +func (val *Node) AsObj() (Object, bool) { + var ret Object + if val.Type() != KObject { + return ret, false + } + return Object{ + cptr: val.cptr, + }, true +} + +func (val Node) Obj() Object { + return Object{cptr: val.cptr} +} + +func (val Node) Arr() Array { + return Array{cptr: val.cptr} +} + +func (val *Node) AsArr() (Array, bool) { + var ret Array + if val.Type() != KArray { + return ret, false + } + return Array{ + cptr: val.cptr, + }, true +} + +func (self Node) AsI64(ctx *Context) (int64, bool) { + typ := self.Type() + if typ == KUint && self.U64() <= math.MaxInt64 { + return int64(self.U64()), true + } else if typ == KSint { + return self.I64(), true + } else if typ == KRawNumber { + val, err := self.Number(ctx).Int64() + if err != nil { + return 0, false + } + return val, true + } else { + return 0, false + } +} + +/********* Parse Node String into Value ***************/ + +func (val Node) ParseI64(ctx *Context) (int64, bool) { + s, ok := val.AsStrRef(ctx) + if !ok { + return 0, false + } + + if s == "null" { + return 0, true + } + + i, err := ParseI64(s) + if err != nil { + return 0, false + } + return i, true +} + +func (val Node) ParseBool(ctx *Context) (bool, bool) { + s, ok := val.AsStrRef(ctx) + if !ok { + return false, false + } + + if s == "null" { + return false, true + } + + b, err := ParseBool(s) + if err != nil { + return false, false + } + return b, true +} + +func (val Node) ParseU64(ctx *Context) (uint64, bool) { + s, ok := val.AsStrRef(ctx) + if !ok { + return 0, false + } + + if s == "null" { + return 0, true + } + + i, err := ParseU64(s) + if err != nil { + return 0, false + } + return i, true +} + +func (val Node) ParseF64(ctx *Context) (float64, bool) { + s, ok := val.AsStrRef(ctx) + if !ok { + return 0, false + } + + if s == "null" { + return 0, true + } + + i, err := ParseF64(s) + if err != nil { + return 0, false + } + return i, true +} + +func (val Node) ParseString(ctx *Context) 
(string, bool) { + // shoud not use AsStrRef + s, ok := val.AsStr(ctx) + if !ok { + return "", false + } + + if s == "null" { + return "", true + } + + s, err := Unquote(s) + if err != nil { + return "", false + } + return s, true +} + + +func (val Node) ParseNumber(ctx *Context) (json.Number, bool) { + // shoud not use AsStrRef + s, ok := val.AsStr(ctx) + if !ok { + return json.Number(""), false + } + + if s == "null" { + return json.Number(""), true + } + + end, err := SkipNumberFast(s, 0) + // has error or trailing chars + if err != nil || end != len(s) { + return json.Number(""), false + } + return json.Number(s), true +} + + + +func (val Node) AsF64(ctx *Context) (float64, bool) { + switch val.Type() { + case KUint: return float64(val.U64()), true + case KSint: return float64(val.I64()), true + case KReal: return float64(val.F64()), true + case KRawNumber: f, err := val.Number(ctx).Float64(); return f, err == nil + default: return 0, false + } +} + +func (val Node) AsBool() (bool, bool) { + switch val.Type() { + case KTrue: return true, true + case KFalse: return false, true + default: return false, false + } +} + +func (val Node) AsStr(ctx *Context) (string, bool) { + switch val.Type() { + case KStringCommon: + s := val.StringRef(ctx) + if (ctx.Options() & (1 << _F_copy_string) == 0) { + return s, true + } + return string(rt.Str2Mem(s)), true + case KStringEscaped: + return val.StringCopyEsc(ctx), true + default: return "", false + } +} + +func (val Node) AsStrRef(ctx *Context) (string, bool) { + switch val.Type() { + case KStringEscaped: + node := ptrCast(val.cptr) + offset := val.Position() + len := int(node.val) + return rt.Mem2Str(ctx.Parser.JsonBytes()[offset : offset + len]), true + case KStringCommon: + return val.StringRef(ctx), true + default: + return "", false + } +} + +func (val Node) AsBytesRef(ctx *Context) ([]byte, bool) { + switch val.Type() { + case KStringEscaped: + node := ptrCast(val.cptr) + offset := val.Position() + len := int(node.val) + return ctx.Parser.JsonBytes()[offset : offset + len], true + case KStringCommon: + return rt.Str2Mem(val.StringRef(ctx)), true + default: + return nil, false + } +} + +func (val Node) AsStringText(ctx *Context) ([]byte, bool) { + if !val.IsStr() { + return nil, false + } + + // clone to new bytes + s, b := val.AsStrRef(ctx) + return []byte(s), b +} + +func (val Node) IsStr() bool { + return (val.Type() == KStringCommon) || (val.Type() == KStringEscaped) +} + +func (val Node) IsRawNumber() bool { + return val.Type() == KRawNumber +} + +func (val Node) Number(ctx *Context) json.Number { + return json.Number(val.Raw(ctx)) +} + +func (val Node) Raw(ctx *Context) string { + node := ptrCast(val.cptr) + len := int(node.val) + offset := val.Position() + return ctx.Parser.Json[offset:int(offset+len)] +} + +func (val Node) Position() int { + node := ptrCast(val.cptr) + return int(node.typ >> PosBits) +} + +func (val Node) AsNumber(ctx *Context) (json.Number, bool) { + // parse JSON string as number + if val.IsStr() { + s, _ := val.AsStr(ctx) + if !ValidNumberFast(s) { + return "", false + } else { + return json.Number(s), true + } + } + + return val.NonstrAsNumber(ctx) +} + +func (val Node) NonstrAsNumber(ctx *Context) (json.Number, bool) { + // deal with raw number + if val.IsRawNumber() { + return val.Number(ctx), true + } + + // deal with parse number + if !val.IsNumber() { + return json.Number(""), false + } + + start := val.Position() + end, err := SkipNumberFast(ctx.Parser.Json, start) + if err != nil { + return "", false + } + 
return json.Number(ctx.Parser.Json[start:end]), true +} + +func (val Node) AsRaw(ctx *Context) string { + // fast path for unescaped strings + switch val.Type() { + case KNull: + return "null" + case KTrue: + return "true" + case KFalse: + return "false" + case KStringCommon: + node := ptrCast(val.cptr) + len := int(node.val) + offset := val.Position() + // add start abd end quote + ref := rt.Str2Mem(ctx.Parser.Json)[offset-1 : offset+len+1] + return rt.Mem2Str(ref) + case KRawNumber: fallthrough + case KRaw: return val.Raw(ctx) + case KStringEscaped: + raw, _ := SkipOneFast(ctx.Parser.Json, val.Position() - 1) + return raw + default: + raw, err := SkipOneFast(ctx.Parser.Json, val.Position()) + if err != nil { + break + } + return raw + } + panic("should always be valid json here") +} + +// reference from the input JSON as possible +func (val Node) StringRef(ctx *Context) string { + return val.Raw(ctx) +} + +//go:nocheckptr +func ptrCast(p uintptr) *node { + return (*node)(unsafe.Pointer(p)) +} + +func (val Node) StringCopyEsc(ctx *Context) string { + // check whether there are in padded + node := ptrCast(val.cptr) + len := int(node.val) + offset := val.Position() + return string(ctx.Parser.JsonBytes()[offset : offset + len]) +} + +func (val Node) Object() Object { + return Object{cptr: val.cptr} +} + +func (val Node) Array() Array { + return Array{cptr: val.cptr} +} + +func (val *Array) Children() uintptr { + return PtrOffset(val.cptr, 1) +} + +func (val *Object) Children() uintptr { + return PtrOffset(val.cptr, 1) +} + +func (val *Node) Equal(ctx *Context, lhs string) bool { + // check whether escaped + cstr := ptrCast(val.cptr) + offset := int(val.Position()) + len := int(cstr.val) + return lhs == ctx.Parser.Json[offset:offset+len] +} + +func (node *Node) AsMapEface(ctx *Context, vp unsafe.Pointer) error { + if node.IsNull() { + return nil + } + + obj, ok := node.AsObj() + if !ok { + return newUnmatched(node.Position(), rt.MapEfaceType) + } + + var err, gerr error + size := obj.Len() + + var m map[string]interface{} + if *(*unsafe.Pointer)(vp) == nil { + if ctx.efacePool != nil { + p := ctx.efacePool.GetMap(size) + m = *(*map[string]interface{})(unsafe.Pointer(&p)) + } else { + m = make(map[string]interface{}, size) + } + } else { + m = *(*map[string]interface{})(vp) + } + + next := obj.Children() + for i := 0; i < size; i++ { + knode := NewNode(next) + key, _ := knode.AsStr(ctx) + val := NewNode(PtrOffset(next, 1)) + m[key], err = val.AsEface(ctx) + next = val.cptr + if gerr == nil && err != nil { + gerr = err + } + } + + *(*map[string]interface{})(vp) = m + return gerr +} + +func (node *Node) AsMapString(ctx *Context, vp unsafe.Pointer) error { + obj, ok := node.AsObj() + if !ok { + return newUnmatched(node.Position(), rt.MapStringType) + } + + size := obj.Len() + + var m map[string]string + if *(*unsafe.Pointer)(vp) == nil { + m = make(map[string]string, size) + } else { + m = *(*map[string]string)(vp) + } + + var gerr error + next := obj.Children() + for i := 0; i < size; i++ { + knode := NewNode(next) + key, _ := knode.AsStr(ctx) + val := NewNode(PtrOffset(next, 1)) + m[key], ok = val.AsStr(ctx) + if !ok { + if gerr == nil { + gerr = newUnmatched(val.Position(), rt.StringType) + } + next = val.Next() + } else { + next = PtrOffset(val.cptr, 1) + } + } + + *(*map[string]string)(vp) = m + return gerr +} + +func (node *Node) AsSliceEface(ctx *Context, vp unsafe.Pointer) error { + arr, ok := node.AsArr() + if !ok { + return newUnmatched(node.Position(), rt.SliceEfaceType) + } + + size 
+    size := arr.Len()
+    var s []interface{}
+    if size != 0 && ctx.efacePool != nil {
+        slice := rt.GoSlice {
+            Ptr: ctx.efacePool.GetSlice(size),
+            Len: size,
+            Cap: size,
+        }
+        *(*rt.GoSlice)(unsafe.Pointer(&s)) = slice
+    } else {
+        s = *(*[]interface{})((unsafe.Pointer)(rt.MakeSlice(vp, rt.AnyType, size)))
+    }
+
+    *node = NewNode(arr.Children())
+
+    var err, gerr error
+    for i := 0; i < size; i++ {
+        s[i], err = node.AsEface(ctx)
+        if gerr == nil && err != nil {
+            gerr = err
+        }
+    }
+
+    *(*[]interface{})(vp) = s
+    return gerr
+}
+
+func (node *Node) AsSliceI32(ctx *Context, vp unsafe.Pointer) error {
+    arr, ok := node.AsArr()
+    if !ok {
+        return newUnmatched(node.Position(), rt.SliceI32Type)
+    }
+
+    size := arr.Len()
+    s := *(*[]int32)((unsafe.Pointer)(rt.MakeSlice(vp, rt.Int32Type, size)))
+    next := arr.Children()
+
+    var gerr error
+    for i := 0; i < size; i++ {
+        val := NewNode(next)
+        ret, ok := val.AsI64(ctx)
+        if !ok || ret > math.MaxInt32 || ret < math.MinInt32 {
+            if gerr == nil {
+                gerr = newUnmatched(val.Position(), rt.Int32Type)
+            }
+            next = val.Next()
+        } else {
+            s[i] = int32(ret)
+            next = PtrOffset(val.cptr, 1)
+        }
+    }
+
+    *(*[]int32)(vp) = s
+    return gerr
+}
+
+func (node *Node) AsSliceI64(ctx *Context, vp unsafe.Pointer) error {
+    arr, ok := node.AsArr()
+    if !ok {
+        return newUnmatched(node.Position(), rt.SliceI64Type)
+    }
+
+    size := arr.Len()
+    s := *(*[]int64)((unsafe.Pointer)(rt.MakeSlice(vp, rt.Int64Type, size)))
+    next := arr.Children()
+
+    var gerr error
+    for i := 0; i < size; i++ {
+        val := NewNode(next)
+
+        ret, ok := val.AsI64(ctx)
+        if !ok {
+            if gerr == nil {
+                gerr = newUnmatched(val.Position(), rt.Int64Type)
+            }
+            next = val.Next()
+        } else {
+            s[i] = ret
+            next = PtrOffset(val.cptr, 1)
+        }
+    }
+
+    *(*[]int64)(vp) = s
+    return gerr
+}
+
+func (node *Node) AsSliceU32(ctx *Context, vp unsafe.Pointer) error {
+    arr, ok := node.AsArr()
+    if !ok {
+        return newUnmatched(node.Position(), rt.SliceU32Type)
+    }
+
+    size := arr.Len()
+    next := arr.Children()
+    s := *(*[]uint32)((unsafe.Pointer)(rt.MakeSlice(vp, rt.Uint32Type, size)))
+
+    var gerr error
+    for i := 0; i < size; i++ {
+        val := NewNode(next)
+        ret, ok := val.AsU64(ctx)
+        if !ok || ret > math.MaxUint32 {
+            if gerr == nil {
+                gerr = newUnmatched(val.Position(), rt.Uint32Type)
+            }
+            next = val.Next()
+        } else {
+            s[i] = uint32(ret)
+            next = PtrOffset(val.cptr, 1)
+        }
+    }
+
+    *(*[]uint32)(vp) = s
+    return gerr
+}
+
+func (node *Node) AsSliceU64(ctx *Context, vp unsafe.Pointer) error {
+    arr, ok := node.AsArr()
+    if !ok {
+        return newUnmatched(node.Position(), rt.SliceU64Type)
+    }
+
+    size := arr.Len()
+    next := arr.Children()
+
+    s := *(*[]uint64)((unsafe.Pointer)(rt.MakeSlice(vp, rt.Uint64Type, size)))
+    var gerr error
+    for i := 0; i < size; i++ {
+        val := NewNode(next)
+        ret, ok := val.AsU64(ctx)
+        if !ok {
+            if gerr == nil {
+                gerr = newUnmatched(val.Position(), rt.Uint64Type)
+            }
+            next = val.Next()
+        } else {
+            s[i] = ret
+            next = PtrOffset(val.cptr, 1)
+        }
+    }
+
+    *(*[]uint64)(vp) = s
+    return gerr
+}
+
+func (node *Node) AsSliceString(ctx *Context, vp unsafe.Pointer) error {
+    arr, ok := node.AsArr()
+    if !ok {
+        return newUnmatched(node.Position(), rt.SliceStringType)
+    }
+
+    size := arr.Len()
+    next := arr.Children()
+    s := *(*[]string)((unsafe.Pointer)(rt.MakeSlice(vp, rt.StringType, size)))
+
+    var gerr error
+    for i := 0; i < size; i++ {
+        val := NewNode(next)
+        ret, ok := val.AsStr(ctx)
+        if !ok {
+            if gerr == nil {
+                gerr = newUnmatched(val.Position(), rt.StringType)
+            }
+            next = val.Next()
+        } else {
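+            // the element is a valid string: store it and step to the next sibling node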
+            s[i] = ret
+            next = PtrOffset(val.cptr, 1)
+        }
+    }
+
+    *(*[]string)(vp) = s
+    return gerr
+}
+
+func (node *Node) AsSliceBytes(ctx *Context) ([]byte, error) {
+    b, ok := node.AsBytesRef(ctx)
+    if !ok {
+        return nil, newUnmatched(node.Position(), rt.BytesType)
+    }
+
+    b64, err := rt.DecodeBase64(b)
+    if err != nil {
+        return nil, newUnmatched(node.Position(), rt.BytesType)
+    }
+    return b64, nil
+}
+
+// AsEface always succeeds, because the input has already been parsed and validated by the native parser.
+func (node *Node) AsEface(ctx *Context) (interface{}, error) {
+    if ctx.efacePool != nil {
+        iter := NewNodeIter(*node)
+        v := AsEfaceFast(&iter, ctx)
+        *node = iter.Peek()
+        return v, nil
+    } else {
+        return node.AsEfaceFallback(ctx)
+    }
+}
+
+func parseSingleNode(node Node, ctx *Context) interface{} {
+    var v interface{}
+    switch node.Type() {
+    case KObject: v = map[string]interface{}{}
+    case KArray: v = []interface{}{}
+    case KStringCommon: v = node.StringRef(ctx)
+    case KStringEscaped: v = node.StringCopyEsc(ctx)
+    case KTrue: v = true
+    case KFalse: v = false
+    case KNull: v = nil
+    case KUint: v = float64(node.U64())
+    case KSint: v = float64(node.I64())
+    case KReal: v = float64(node.F64())
+    case KRawNumber: v = node.Number(ctx)
+    default: panic("unreachable for as eface")
+    }
+    return v
+}
+
+func castU64(val float64) uint64 {
+    return *((*uint64)(unsafe.Pointer((&val))))
+}
+
+func AsEfaceFast(iter *NodeIter, ctx *Context) interface{} {
+    var mp, sp, parent unsafe.Pointer // current container pointer
+    var node Node
+    var size int
+    var isObj bool
+    var slice rt.GoSlice
+    var val unsafe.Pointer
+    var vt **rt.GoType
+    var vp *unsafe.Pointer
+    var rootM unsafe.Pointer
+    var rootS rt.GoSlice
+    var root interface{}
+    var key string
+
+    node = iter.Next()
+
+    switch node.Type() {
+    case KObject:
+        size = node.Object().Len()
+        if size != 0 {
+            ctx.Stack.Push(nil, 0, true)
+            mp = ctx.efacePool.GetMap(size)
+            rootM = mp
+            isObj = true
+            goto _object_key
+        } else {
+            return rt.GoEface {
+                Type: rt.MapEfaceType,
+                Value: ctx.efacePool.GetMap(0),
+            }.Pack()
+        }
+    case KArray:
+        size = node.Array().Len()
+        if size != 0 {
+            ctx.Stack.Push(nil, 0, false)
+            sp = ctx.efacePool.GetSlice(size)
+            slice = rt.GoSlice {
+                Ptr: sp,
+                Len: size,
+                Cap: size,
+            }
+            rootS = slice
+            isObj = false
+            val = sp
+            goto _arr_val;
+        } else {
+            ctx.efacePool.ConvTSlice(rt.EmptySlice, rt.SliceEfaceType, unsafe.Pointer(&root))
+        }
+    case KStringCommon: ctx.efacePool.ConvTstring(node.StringRef(ctx), unsafe.Pointer(&root))
+    case KStringEscaped: ctx.efacePool.ConvTstring(node.StringCopyEsc(ctx), unsafe.Pointer(&root))
+    case KTrue: root = true
+    case KFalse: root = false
+    case KNull: root = nil
+    case KUint: ctx.efacePool.ConvF64(float64(node.U64()), unsafe.Pointer(&root))
+    case KSint: ctx.efacePool.ConvF64(float64(node.I64()), unsafe.Pointer(&root))
+    case KReal: ctx.efacePool.ConvF64(node.F64(), unsafe.Pointer(&root))
+    case KRawNumber: ctx.efacePool.ConvTnum(node.Number(ctx), unsafe.Pointer(&root))
+    default: panic("unreachable for as eface")
+    }
+    return root
+
+_object_key:
+    node = iter.Next()
+    if node.Type() == KStringCommon {
+        key = node.StringRef(ctx)
+    } else {
+        key = node.StringCopyEsc(ctx)
+    }
+
+    // interface{} slot in map bucket
+    val = rt.Mapassign_faststr(rt.MapEfaceMapType, mp, key)
+    vt = &(*rt.GoEface)(val).Type
+    vp = &(*rt.GoEface)(val).Value
+
+    // parse value node
+    node = iter.Next()
+    switch node.Type() {
+    case KObject:
+        newSize := node.Object().Len()
+        newMp := ctx.efacePool.GetMap(newSize)
+        *vt = rt.MapEfaceType
+        *vp = newMp
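+        // the new map is already wired into its parent slot, so an empty
+        // object needs no descent; otherwise save the remaining siblings
+        // on the stack and descend into the nested object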
+ remain := size - 1 + isObj = true + if newSize != 0 { + if remain > 0 { + ctx.Stack.Push(mp, remain, true) + } + mp = newMp + size = newSize + goto _object_key; + } + case KArray: + newSize := node.Array().Len() + if newSize == 0 { + ctx.efacePool.ConvTSlice(rt.EmptySlice, rt.SliceEfaceType, val) + break; + } + + newSp := ctx.efacePool.GetSlice(newSize) + // pack to []interface{} + ctx.efacePool.ConvTSlice(rt.GoSlice{ + Ptr: newSp, + Len: newSize, + Cap: newSize, + }, rt.SliceEfaceType, val) + remain := size - 1 + if remain > 0 { + ctx.Stack.Push(mp, remain, true) + } + val = newSp + isObj = false + size = newSize + goto _arr_val; + case KStringCommon: + ctx.efacePool.ConvTstring(node.StringRef(ctx), val) + case KStringEscaped: + ctx.efacePool.ConvTstring(node.StringCopyEsc(ctx), val) + case KTrue: + rt.ConvTBool(true, (*interface{})(val)) + case KFalse: + rt.ConvTBool(false, (*interface{})(val)) + case KNull: /* skip */ + case KUint: + ctx.efacePool.ConvF64(float64(node.U64()), val) + case KSint: + ctx.efacePool.ConvF64(float64(node.I64()), val) + case KReal: + ctx.efacePool.ConvF64(node.F64(), val) + case KRawNumber: + ctx.efacePool.ConvTnum(node.Number(ctx), val) + default: + panic("unreachable for as eface") + } + + // check size + size -= 1 + if size != 0 { + goto _object_key; + } + + parent, size, isObj = ctx.Stack.Pop() + + // parent is empty + if parent == nil { + if isObj { + return rt.GoEface { + Type: rt.MapEfaceType, + Value: rootM, + }.Pack() + } else { + ctx.efacePool.ConvTSlice(rootS, rt.SliceEfaceType, (unsafe.Pointer)(&root)) + return root + } + } + + // continue to parse parent + if isObj { + mp = parent + goto _object_key; + } else { + val = rt.PtrAdd(parent, rt.AnyType.Size) + goto _arr_val; + } + +_arr_val: + // interface{} slot in slice + vt = &(*rt.GoEface)(val).Type + vp = &(*rt.GoEface)(val).Value + + // parse value node + node = iter.Next() + switch node.Type() { + case KObject: + newSize := node.Object().Len() + newMp := ctx.efacePool.GetMap(newSize) + *vt = rt.MapEfaceType + *vp = newMp + remain := size - 1 + if newSize != 0 { + // push next array elem into stack + if remain > 0 { + ctx.Stack.Push(val, remain, false) + } + mp = newMp + size = newSize + isObj = true + goto _object_key; + } + case KArray: + newSize := node.Array().Len() + if newSize == 0 { + ctx.efacePool.ConvTSlice(rt.EmptySlice, rt.SliceEfaceType, val) + break; + } + + newSp := ctx.efacePool.GetSlice(newSize) + // pack to []interface{} + ctx.efacePool.ConvTSlice(rt.GoSlice { + Ptr: newSp, + Len: newSize, + Cap: newSize, + }, rt.SliceEfaceType, val) + + remain := size - 1 + if remain > 0 { + ctx.Stack.Push(val, remain, false) + } + + val = newSp + isObj = false + size = newSize + goto _arr_val; + case KStringCommon: + ctx.efacePool.ConvTstring(node.StringRef(ctx), val) + case KStringEscaped: + ctx.efacePool.ConvTstring(node.StringCopyEsc(ctx), val) + case KTrue: + rt.ConvTBool(true, (*interface{})(val)) + case KFalse: + rt.ConvTBool(false, (*interface{})(val)) + case KNull: /* skip */ + case KUint: + ctx.efacePool.ConvF64(float64(node.U64()), val) + case KSint: + ctx.efacePool.ConvF64(float64(node.I64()), val) + case KReal: + ctx.efacePool.ConvF64(node.F64(), val) + case KRawNumber: + ctx.efacePool.ConvTnum(node.Number(ctx), val) + default: panic("unreachable for as eface") + } + + // check size + size -= 1 + if size != 0 { + val = rt.PtrAdd(val, rt.AnyType.Size) + goto _arr_val; + } + + + parent, size, isObj = ctx.Stack.Pop() + + // parent is empty + if parent == nil { + if isObj { + return 
rt.GoEface {
+                Type: rt.MapEfaceType,
+                Value: rootM,
+            }.Pack()
+        } else {
+            ctx.efacePool.ConvTSlice(rootS, rt.SliceEfaceType, unsafe.Pointer(&root))
+            return root
+        }
+    }
+
+    // continue to parse parent
+    if isObj {
+        mp = parent
+        goto _object_key;
+    } else {
+        val = rt.PtrAdd(parent, rt.AnyType.Size)
+        goto _arr_val;
+    }
+}
+
+func (node *Node) AsEfaceFallback(ctx *Context) (interface{}, error) {
+    switch node.Type() {
+    case KObject:
+        obj := node.Object()
+        size := obj.Len()
+        m := make(map[string]interface{}, size)
+        *node = NewNode(obj.Children())
+        var gerr, err error
+        for i := 0; i < size; i++ {
+            key, _ := node.AsStr(ctx)
+            *node = NewNode(PtrOffset(node.cptr, 1))
+            m[key], err = node.AsEfaceFallback(ctx)
+            if gerr == nil && err != nil {
+                gerr = err
+            }
+        }
+        return m, gerr
+    case KArray:
+        arr := node.Array()
+        size := arr.Len()
+        a := make([]interface{}, size)
+        *node = NewNode(arr.Children())
+        var gerr, err error
+        for i := 0; i < size; i++ {
+            a[i], err = node.AsEfaceFallback(ctx)
+            if gerr == nil && err != nil {
+                gerr = err
+            }
+        }
+        return a, gerr
+    case KStringCommon:
+        str, _ := node.AsStr(ctx)
+        *node = NewNode(PtrOffset(node.cptr, 1))
+        return str, nil
+    case KStringEscaped:
+        str := node.StringCopyEsc(ctx)
+        *node = NewNode(PtrOffset(node.cptr, 1))
+        return str, nil
+    case KTrue:
+        *node = NewNode(PtrOffset(node.cptr, 1))
+        return true, nil
+    case KFalse:
+        *node = NewNode(PtrOffset(node.cptr, 1))
+        return false, nil
+    case KNull:
+        *node = NewNode(PtrOffset(node.cptr, 1))
+        return nil, nil
+    default:
+        // numeric node: pick the representation required by the decoder options
+        if ctx.Parser.options & (1 << _F_use_number) != 0 {
+            num, ok := node.AsNumber(ctx)
+            if !ok {
+                // skip the unmatched type
+                *node = NewNode(node.Next())
+                return nil, newUnmatched(node.Position(), rt.JsonNumberType)
+            } else {
+                *node = NewNode(PtrOffset(node.cptr, 1))
+                return num, nil
+            }
+        } else if ctx.Parser.options & (1 << _F_use_int64) != 0 {
+            // first try int64
+            i, ok := node.AsI64(ctx)
+            if ok {
+                *node = NewNode(PtrOffset(node.cptr, 1))
+                return i, nil
+            }
+
+            // not an integer, fall back to float64
+            f, ok := node.AsF64(ctx)
+            if ok {
+                *node = NewNode(PtrOffset(node.cptr, 1))
+                return f, nil
+            }
+
+            // skip the unmatched type
+            *node = NewNode(node.Next())
+            return nil, newUnmatched(node.Position(), rt.Int64Type)
+        } else {
+            num, ok := node.AsF64(ctx)
+            if !ok {
+                // skip the unmatched type
+                *node = NewNode(node.Next())
+                return nil, newUnmatched(node.Position(), rt.Float64Type)
+            } else {
+                *node = NewNode(PtrOffset(node.cptr, 1))
+                return num, nil
+            }
+        }
+    }
+}
+
+//go:nosplit
+func PtrOffset(ptr uintptr, off int64) uintptr {
+    return uintptr(int64(ptr) + off * int64(unsafe.Sizeof(node{})))
+}
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/optdec/slice.go b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/slice.go
new file mode 100644
index 0000000000000000000000000000000000000000..a94e422b36a5957c2e30242cd37cd9f61e521421
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/slice.go
@@ -0,0 +1,224 @@
+package optdec
+
+import (
+    "reflect"
+    "unsafe"
+
+    "github.com/bytedance/sonic/internal/rt"
+)
+
+type sliceDecoder struct {
+    elemType *rt.GoType
+    elemDec decFunc
+    typ reflect.Type
+}
+
+var (
+    emptyPtr = &struct{}{}
+)
+
+func (d *sliceDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+    if node.IsNull() {
+        *(*rt.GoSlice)(vp) = rt.GoSlice{}
+        return nil
+    }
+
+    arr, ok := node.AsArr()
+    if !ok {
+        return error_mismatch(node, ctx, d.typ)
+    }
+
+    slice := 
rt.MakeSlice(vp, d.elemType, arr.Len()) + elems := slice.Ptr + next := arr.Children() + + var gerr error + for i := 0; i < arr.Len(); i++ { + val := NewNode(next) + elem := unsafe.Pointer(uintptr(elems) + uintptr(i)*d.elemType.Size) + err := d.elemDec.FromDom(elem, val, ctx) + if gerr == nil && err != nil { + gerr = err + } + next = val.Next() + } + + *(*rt.GoSlice)(vp) = *slice + return gerr +} + +type arrayDecoder struct { + len int + elemType *rt.GoType + elemDec decFunc + typ reflect.Type +} + +//go:nocheckptr +func (d *arrayDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + return nil + } + + arr, ok := node.AsArr() + if !ok { + return error_mismatch(node, ctx, d.typ) + } + + next := arr.Children() + i := 0 + + var gerr error + for ; i < d.len && i < arr.Len(); i++ { + elem := unsafe.Pointer(uintptr(vp) + uintptr(i)*d.elemType.Size) + val := NewNode(next) + err := d.elemDec.FromDom(elem, val, ctx) + if gerr == nil && err != nil { + gerr = err + } + next = val.Next() + } + + /* zero rest of array */ + ptr := unsafe.Pointer(uintptr(vp) + uintptr(i)*d.elemType.Size) + n := uintptr(d.len-i) * d.elemType.Size + rt.ClearMemory(d.elemType, ptr, n) + return gerr +} + +type sliceEfaceDecoder struct { +} + +func (d *sliceEfaceDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + *(*rt.GoSlice)(vp) = rt.GoSlice{} + return nil + } + + return node.AsSliceEface(ctx, vp) +} + +type sliceI32Decoder struct { +} + +func (d *sliceI32Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + *(*rt.GoSlice)(vp) = rt.GoSlice{} + return nil + } + + return node.AsSliceI32(ctx, vp) +} + +type sliceI64Decoder struct { +} + +func (d *sliceI64Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + *(*rt.GoSlice)(vp) = rt.GoSlice{} + return nil + } + + return node.AsSliceI64(ctx, vp) +} + +type sliceU32Decoder struct { +} + +func (d *sliceU32Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + *(*rt.GoSlice)(vp) = rt.GoSlice{} + return nil + } + + return node.AsSliceU32(ctx, vp) +} + +type sliceU64Decoder struct { +} + +func (d *sliceU64Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + *(*rt.GoSlice)(vp) = rt.GoSlice{} + return nil + } + + return node.AsSliceU64(ctx, vp) +} + +type sliceStringDecoder struct { +} + +func (d *sliceStringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + *(*rt.GoSlice)(vp) = rt.GoSlice{} + return nil + } + + return node.AsSliceString(ctx, vp) +} + +type sliceBytesDecoder struct { +} + +func (d *sliceBytesDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + *(*rt.GoSlice)(vp) = rt.GoSlice{} + return nil + } + + s, err := node.AsSliceBytes(ctx) + if err != nil { + return err + } + + *(*[]byte)(vp) = s + return nil +} + +type sliceBytesUnmarshalerDecoder struct { + elemType *rt.GoType + elemDec decFunc + typ reflect.Type +} + +func (d *sliceBytesUnmarshalerDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + *(*rt.GoSlice)(vp) = rt.GoSlice{} + return nil + } + + /* parse JSON string into `[]byte` */ + if node.IsStr() { + slice, err := node.AsSliceBytes(ctx) + if err != nil { + return err + } + *(*[]byte)(vp) = slice + return nil + } + + /* parse JSON array into `[]byte` */ + arr, ok := node.AsArr() + if !ok { + return error_mismatch(node, ctx, 
d.typ)
+    }
+
+    slice := rt.MakeSlice(vp, d.elemType, arr.Len())
+    elems := slice.Ptr
+
+    var gerr error
+    next := arr.Children()
+    for i := 0; i < arr.Len(); i++ {
+        child := NewNode(next)
+        elem := unsafe.Pointer(uintptr(elems) + uintptr(i)*d.elemType.Size)
+        err := d.elemDec.FromDom(elem, child, ctx)
+        if gerr == nil && err != nil {
+            gerr = err
+        }
+        next = child.Next()
+    }
+
+    *(*rt.GoSlice)(vp) = *slice
+    return gerr
+}
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/optdec/stringopts.go b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/stringopts.go
new file mode 100644
index 0000000000000000000000000000000000000000..627b5ebeae6ade5d673b5af0e958fe5e288121ce
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/stringopts.go
@@ -0,0 +1,360 @@
+package optdec
+
+import (
+    "encoding/json"
+    "math"
+    "unsafe"
+
+    "github.com/bytedance/sonic/internal/rt"
+)
+
+type ptrStrDecoder struct {
+    typ *rt.GoType
+    deref decFunc
+}
+
+// the pointer value itself is allocated by the caller
+func (d *ptrStrDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+    if node.IsNull() {
+        *(*unsafe.Pointer)(vp) = nil
+        return nil
+    }
+
+    s, ok := node.AsStrRef(ctx)
+    if !ok {
+        return error_mismatch(node, ctx, stringType)
+    }
+
+    if s == "null" {
+        *(*unsafe.Pointer)(vp) = nil
+        return nil
+    }
+
+    if *(*unsafe.Pointer)(vp) == nil {
+        *(*unsafe.Pointer)(vp) = rt.Mallocgc(d.typ.Size, d.typ, true)
+    }
+
+    return d.deref.FromDom(*(*unsafe.Pointer)(vp), node, ctx)
+}
+
+type boolStringDecoder struct {
+}
+
+func (d *boolStringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+    if node.IsNull() {
+        return nil
+    }
+
+    s, ok := node.AsStrRef(ctx)
+    if !ok {
+        return error_mismatch(node, ctx, stringType)
+    }
+
+    if s == "null" {
+        return nil
+    }
+
+    b, err := ParseBool(s)
+    if err != nil {
+        return error_mismatch(node, ctx, boolType)
+    }
+
+    *(*bool)(vp) = b
+    return nil
+}
+
+func parseI64(node Node, ctx *context) (int64, error, bool) {
+    if node.IsNull() {
+        return 0, nil, true
+    }
+
+    s, ok := node.AsStrRef(ctx)
+    if !ok {
+        return 0, error_mismatch(node, ctx, stringType), false
+    }
+
+    if s == "null" {
+        return 0, nil, true
+    }
+
+    ret, err := ParseI64(s)
+    return ret, err, false
+}
+
+type i8StringDecoder struct{}
+
+func (d *i8StringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+    ret, err, null := parseI64(node, ctx)
+    if null {
+        return nil
+    }
+
+    if err != nil {
+        return err
+    }
+
+    if ret > math.MaxInt8 || ret < math.MinInt8 {
+        return error_mismatch(node, ctx, int8Type)
+    }
+
+    *(*int8)(vp) = int8(ret)
+    return nil
+}
+
+type i16StringDecoder struct{}
+
+func (d *i16StringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+    ret, err, null := parseI64(node, ctx)
+    if null {
+        return nil
+    }
+
+    if err != nil {
+        return err
+    }
+
+    if ret > math.MaxInt16 || ret < math.MinInt16 {
+        return error_mismatch(node, ctx, int16Type)
+    }
+
+    *(*int16)(vp) = int16(ret)
+    return nil
+}
+
+type i32StringDecoder struct{}
+
+func (d *i32StringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+    ret, err, null := parseI64(node, ctx)
+    if null {
+        return nil
+    }
+
+    if err != nil {
+        return err
+    }
+
+    if ret > math.MaxInt32 || ret < math.MinInt32 {
+        return error_mismatch(node, ctx, int32Type)
+    }
+
+    *(*int32)(vp) = int32(ret)
+    return nil
+}
+
+type i64StringDecoder struct{}
+
+func (d *i64StringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+    ret, err, null := 
parseI64(node, ctx) + if null { + return nil + } + + if err != nil { + return err + } + + *(*int64)(vp) = int64(ret) + return nil +} + +func parseU64(node Node, ctx *context) (uint64, error, bool) { + if node.IsNull() { + return 0, nil, true + } + + s, ok := node.AsStrRef(ctx) + if !ok { + return 0, error_mismatch(node, ctx, stringType), false + } + + if s == "null" { + return 0, nil, true + } + + ret, err := ParseU64(s) + return ret, err, false +} + +type u8StringDecoder struct{} + +func (d *u8StringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + ret, err, null := parseU64(node, ctx) + if null { + return nil + } + + if err != nil { + return err + } + + if ret > math.MaxUint8 { + return error_mismatch(node, ctx, uint8Type) + } + + *(*uint8)(vp) = uint8(ret) + return nil +} + +type u16StringDecoder struct{} + +func (d *u16StringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + ret, err, null := parseU64(node, ctx) + if null { + return nil + } + + if err != nil { + return err + } + + if ret > math.MaxUint16 { + return error_mismatch(node, ctx, uint16Type) + } + + *(*uint16)(vp) = uint16(ret) + return nil +} + +type u32StringDecoder struct{} + +func (d *u32StringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + ret, err, null := parseU64(node, ctx) + if null { + return nil + } + + if err != nil { + return err + } + + if ret > math.MaxUint32 { + return error_mismatch(node, ctx, uint32Type) + } + + *(*uint32)(vp) = uint32(ret) + return nil +} + + +type u64StringDecoder struct{} + +func (d *u64StringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + ret, err, null := parseU64(node, ctx) + if null { + return nil + } + + if err != nil { + return err + } + + *(*uint64)(vp) = uint64(ret) + return nil +} + +type f32StringDecoder struct{} + +func (d *f32StringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + return nil + } + + s, ok := node.AsStrRef(ctx) + if !ok { + return error_mismatch(node, ctx, stringType) + } + + if s == "null" { + return nil + } + + ret, err := ParseF64(s) + if err != nil || ret > math.MaxFloat32 || ret < -math.MaxFloat32 { + return error_mismatch(node, ctx, float32Type) + } + + *(*float32)(vp) = float32(ret) + return nil +} + +type f64StringDecoder struct{} + +func (d *f64StringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + return nil + } + + s, ok := node.AsStrRef(ctx) + if !ok { + return error_mismatch(node, ctx, stringType) + } + + if s == "null" { + return nil + } + + ret, err := ParseF64(s) + if err != nil { + return error_mismatch(node, ctx, float64Type) + } + + *(*float64)(vp) = float64(ret) + return nil +} + +/* parse string field with string options */ +type strStringDecoder struct{} + +func (d *strStringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + return nil + } + + s, ok := node.AsStrRef(ctx) + if !ok { + return error_mismatch(node, ctx, stringType) + } + + if s == "null" { + return nil + } + + s, err := Unquote(s) + if err != nil { + return error_mismatch(node, ctx, stringType) + } + + *(*string)(vp) = s + return nil +} + +type numberStringDecoder struct{} + +func (d *numberStringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + return nil + } + + s, ok := node.AsStrRef(ctx) + if !ok { + return error_mismatch(node, ctx, stringType) + } + + if s == "null" { + return nil + } + + num, ok := node.ParseNumber(ctx) 
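+    // ok is false when the quoted payload is not a syntactically valid JSON number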
+ if !ok { + return error_mismatch(node, ctx, jsonNumberType) + } + + end, err := SkipNumberFast(s, 0) + // has error or trailing chars + if err != nil || end != len(s) { + return error_mismatch(node, ctx, jsonNumberType) + } + + *(*json.Number)(vp) = json.Number(num) + return nil +} diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/optdec/structs.go b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/structs.go new file mode 100644 index 0000000000000000000000000000000000000000..bce2758f1e4575dd3bf16d612cfb60cb47a797e9 --- /dev/null +++ b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/structs.go @@ -0,0 +1,61 @@ +package optdec + +import ( + "reflect" + "unsafe" + + caching "github.com/bytedance/sonic/internal/optcaching" + "github.com/bytedance/sonic/internal/resolver" +) + +type fieldEntry struct { + resolver.FieldMeta + fieldDec decFunc +} + +type structDecoder struct { + fieldMap caching.FieldLookup + fields []fieldEntry + structName string + typ reflect.Type +} + +func (d *structDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + return nil + } + + var gerr error + obj, ok := node.AsObj() + if !ok { + return error_mismatch(node, ctx, d.typ) + } + + next := obj.Children() + for i := 0; i < obj.Len(); i++ { + key, _ := NewNode(next).AsStrRef(ctx) + val := NewNode(PtrOffset(next, 1)) + next = val.Next() + + // find field idx + idx := d.fieldMap.Get(key) + if idx == -1 { + if Options(ctx.Options())&OptionDisableUnknown != 0 { + return error_field(key) + } + continue + } + + offset := d.fields[idx].Path[0].Size + elem := unsafe.Pointer(uintptr(vp) + offset) + err := d.fields[idx].fieldDec.FromDom(elem, val, ctx) + + // deal with mismatch type errors + if gerr == nil && err != nil { + // TODO: better error info + gerr = err + } + } + return gerr +} + diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/optdec/types.go b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/types.go new file mode 100644 index 0000000000000000000000000000000000000000..fe1433eec5e8071735bfed57ebf9cb8ae13c7ec7 --- /dev/null +++ b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/types.go @@ -0,0 +1,60 @@ +/* + * Copyright 2021 ByteDance Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package optdec + +import ( + "encoding" + "encoding/base64" + "encoding/json" + "reflect" + "unsafe" + + "github.com/bytedance/sonic/internal/rt" +) + +var ( + boolType = reflect.TypeOf(bool(false)) + byteType = reflect.TypeOf(byte(0)) + intType = reflect.TypeOf(int(0)) + int8Type = reflect.TypeOf(int8(0)) + int16Type = reflect.TypeOf(int16(0)) + int32Type = reflect.TypeOf(int32(0)) + int64Type = reflect.TypeOf(int64(0)) + uintType = reflect.TypeOf(uint(0)) + uint8Type = reflect.TypeOf(uint8(0)) + uint16Type = reflect.TypeOf(uint16(0)) + uint32Type = reflect.TypeOf(uint32(0)) + uint64Type = reflect.TypeOf(uint64(0)) + float32Type = reflect.TypeOf(float32(0)) + float64Type = reflect.TypeOf(float64(0)) + stringType = reflect.TypeOf("") + bytesType = reflect.TypeOf([]byte(nil)) + jsonNumberType = reflect.TypeOf(json.Number("")) + base64CorruptInputError = reflect.TypeOf(base64.CorruptInputError(0)) + anyType = rt.UnpackType(reflect.TypeOf((*interface{})(nil)).Elem()) +) + +var ( + errorType = reflect.TypeOf((*error)(nil)).Elem() + jsonUnmarshalerType = reflect.TypeOf((*json.Unmarshaler)(nil)).Elem() + encodingTextUnmarshalerType = reflect.TypeOf((*encoding.TextUnmarshaler)(nil)).Elem() +) + +func rtype(t reflect.Type) (*rt.GoItab, *rt.GoType) { + p := (*rt.GoIface)(unsafe.Pointer(&t)) + return p.Itab, (*rt.GoType)(p.Value) +} diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/mapiter.go b/vendor/github.com/bytedance/sonic/internal/encoder/alg/mapiter.go similarity index 52% rename from vendor/github.com/bytedance/sonic/internal/encoder/mapiter.go rename to vendor/github.com/bytedance/sonic/internal/encoder/alg/mapiter.go index 8a322b3afbd2164bf31a9ad3a50ff2dc29b43c1d..5d9956a90897cc9c5cfbd7b07c4331af5a6079cd 100644 --- a/vendor/github.com/bytedance/sonic/internal/encoder/mapiter.go +++ b/vendor/github.com/bytedance/sonic/internal/encoder/alg/mapiter.go @@ -14,15 +14,16 @@ * limitations under the License. 
*/ -package encoder +package alg import ( "encoding" "reflect" + "strconv" "sync" "unsafe" - "github.com/bytedance/sonic/internal/native" + "github.com/bytedance/sonic/internal/encoder/vars" "github.com/bytedance/sonic/internal/rt" ) @@ -32,8 +33,8 @@ type _MapPair struct { m [32]byte } -type _MapIterator struct { - it rt.GoMapIterator // must be the first field +type MapIterator struct { + It rt.GoMapIterator // must be the first field kv rt.GoSlice // slice of _MapPair ki int } @@ -44,43 +45,43 @@ var ( ) func init() { - if unsafe.Offsetof(_MapIterator{}.it) != 0 { + if unsafe.Offsetof(MapIterator{}.It) != 0 { panic("_MapIterator.it is not the first field") } } -func newIterator() *_MapIterator { +func newIterator() *MapIterator { if v := iteratorPool.Get(); v == nil { - return new(_MapIterator) + return new(MapIterator) } else { - return resetIterator(v.(*_MapIterator)) + return resetIterator(v.(*MapIterator)) } } -func resetIterator(p *_MapIterator) *_MapIterator { +func resetIterator(p *MapIterator) *MapIterator { p.ki = 0 - p.it = rt.GoMapIterator{} + p.It = rt.GoMapIterator{} p.kv.Len = 0 return p } -func (self *_MapIterator) at(i int) *_MapPair { +func (self *MapIterator) at(i int) *_MapPair { return (*_MapPair)(unsafe.Pointer(uintptr(self.kv.Ptr) + uintptr(i) * unsafe.Sizeof(_MapPair{}))) } -func (self *_MapIterator) add() (p *_MapPair) { +func (self *MapIterator) add() (p *_MapPair) { p = self.at(self.kv.Len) self.kv.Len++ return } -func (self *_MapIterator) data() (p []_MapPair) { +func (self *MapIterator) data() (p []_MapPair) { *(*rt.GoSlice)(unsafe.Pointer(&p)) = self.kv return } -func (self *_MapIterator) append(t *rt.GoType, k unsafe.Pointer, v unsafe.Pointer) (err error) { +func (self *MapIterator) append(t *rt.GoType, k unsafe.Pointer, v unsafe.Pointer) (err error) { p := self.add() p.v = v @@ -94,26 +95,26 @@ func (self *_MapIterator) append(t *rt.GoType, k unsafe.Pointer, v unsafe.Pointe return nil } -func (self *_MapIterator) appendGeneric(p *_MapPair, t *rt.GoType, v reflect.Kind, k unsafe.Pointer) error { +func (self *MapIterator) appendGeneric(p *_MapPair, t *rt.GoType, v reflect.Kind, k unsafe.Pointer) error { switch v { - case reflect.Int : p.k = rt.Mem2Str(p.m[:native.I64toa(&p.m[0], int64(*(*int)(k)))]) ; return nil - case reflect.Int8 : p.k = rt.Mem2Str(p.m[:native.I64toa(&p.m[0], int64(*(*int8)(k)))]) ; return nil - case reflect.Int16 : p.k = rt.Mem2Str(p.m[:native.I64toa(&p.m[0], int64(*(*int16)(k)))]) ; return nil - case reflect.Int32 : p.k = rt.Mem2Str(p.m[:native.I64toa(&p.m[0], int64(*(*int32)(k)))]) ; return nil - case reflect.Int64 : p.k = rt.Mem2Str(p.m[:native.I64toa(&p.m[0], *(*int64)(k))]) ; return nil - case reflect.Uint : p.k = rt.Mem2Str(p.m[:native.U64toa(&p.m[0], uint64(*(*uint)(k)))]) ; return nil - case reflect.Uint8 : p.k = rt.Mem2Str(p.m[:native.U64toa(&p.m[0], uint64(*(*uint8)(k)))]) ; return nil - case reflect.Uint16 : p.k = rt.Mem2Str(p.m[:native.U64toa(&p.m[0], uint64(*(*uint16)(k)))]) ; return nil - case reflect.Uint32 : p.k = rt.Mem2Str(p.m[:native.U64toa(&p.m[0], uint64(*(*uint32)(k)))]) ; return nil - case reflect.Uint64 : p.k = rt.Mem2Str(p.m[:native.U64toa(&p.m[0], *(*uint64)(k))]) ; return nil - case reflect.Uintptr : p.k = rt.Mem2Str(p.m[:native.U64toa(&p.m[0], uint64(*(*uintptr)(k)))]) ; return nil + case reflect.Int : p.k = rt.Mem2Str(strconv.AppendInt(p.m[:0], int64(*(*int)(k)), 10)) ; return nil + case reflect.Int8 : p.k = rt.Mem2Str(strconv.AppendInt(p.m[:0], int64(*(*int8)(k)), 10)) ; return nil + case reflect.Int16 : p.k 
= rt.Mem2Str(strconv.AppendInt(p.m[:0], int64(*(*int16)(k)), 10)) ; return nil
+    case reflect.Int32 : p.k = rt.Mem2Str(strconv.AppendInt(p.m[:0], int64(*(*int32)(k)), 10)) ; return nil
+    case reflect.Int64 : p.k = rt.Mem2Str(strconv.AppendInt(p.m[:0], int64(*(*int64)(k)), 10)) ; return nil
+    case reflect.Uint : p.k = rt.Mem2Str(strconv.AppendUint(p.m[:0], uint64(*(*uint)(k)), 10)) ; return nil
+    case reflect.Uint8 : p.k = rt.Mem2Str(strconv.AppendUint(p.m[:0], uint64(*(*uint8)(k)), 10)) ; return nil
+    case reflect.Uint16 : p.k = rt.Mem2Str(strconv.AppendUint(p.m[:0], uint64(*(*uint16)(k)), 10)) ; return nil
+    case reflect.Uint32 : p.k = rt.Mem2Str(strconv.AppendUint(p.m[:0], uint64(*(*uint32)(k)), 10)) ; return nil
+    case reflect.Uint64 : p.k = rt.Mem2Str(strconv.AppendUint(p.m[:0], uint64(*(*uint64)(k)), 10)) ; return nil
+    case reflect.Uintptr : p.k = rt.Mem2Str(strconv.AppendUint(p.m[:0], uint64(*(*uintptr)(k)), 10)) ; return nil
     case reflect.Interface : return self.appendInterface(p, t, k)
     case reflect.Struct, reflect.Ptr : return self.appendConcrete(p, t, k)
     default : panic("unexpected map key type")
     }
 }
 
-func (self *_MapIterator) appendConcrete(p *_MapPair, t *rt.GoType, k unsafe.Pointer) (err error) {
+func (self *MapIterator) appendConcrete(p *_MapPair, t *rt.GoType, k unsafe.Pointer) (err error) {
     // compiler has already checked that the type implements the encoding.MarshalText interface
     if !t.Indirect() {
         k = *(*unsafe.Pointer)(k)
@@ -127,7 +128,7 @@ func (self *_MapIterator) appendConcrete(p *_MapPair, t *rt.GoType, k unsafe.Poi
     return
 }
 
-func (self *_MapIterator) appendInterface(p *_MapPair, t *rt.GoType, k unsafe.Pointer) (err error) {
+func (self *MapIterator) appendInterface(p *_MapPair, t *rt.GoType, k unsafe.Pointer) (err error) {
     if len(rt.IfaceType(t).Methods) == 0 {
         panic("unexpected map key type")
     } else if p.k, err = asText(k); err == nil {
@@ -137,17 +138,17 @@ func (self *_MapIterator) appendInterface(p *_MapPair, t *rt.GoType, k unsafe.Po
     }
 }
 
-func iteratorStop(p *_MapIterator) {
+func IteratorStop(p *MapIterator) {
     iteratorPool.Put(p)
 }
 
-func iteratorNext(p *_MapIterator) {
+func IteratorNext(p *MapIterator) {
     i := p.ki
-    t := &p.it
+    t := &p.It
 
     /* check for unordered iteration */
     if i < 0 {
-        mapiternext(t)
+        rt.Mapiternext(t)
         return
     }
 
@@ -164,25 +165,25 @@
     p.ki++
 }
 
-func iteratorStart(t *rt.GoMapType, m *rt.GoMap, fv uint64) (*_MapIterator, error) {
+func IteratorStart(t *rt.GoMapType, m *rt.GoMap, fv uint64) (*MapIterator, error) {
     it := newIterator()
-    mapiterinit(t, m, &it.it)
+    rt.Mapiterinit(t, m, &it.It)
 
     /* check for key-sorting, empty map don't need sorting */
-    if m.Count == 0 || (fv & uint64(SortMapKeys)) == 0 {
+    if m.Count == 0 || (fv & (1<<BitSortMapKeys)) == 0 {
         it.ki = -1
         return it, nil
     }
 
     /* pre-allocate space if needed */
     if m.Count > it.kv.Cap {
-        it.kv = growslice(iteratorPair, it.kv, m.Count)
+        it.kv = rt.GrowSlice(iteratorPair, it.kv, m.Count)
     }
 
     /* dump all the key-value pairs */
-    for ; it.it.K != nil; mapiternext(&it.it) {
-        if err := it.append(t.Key, it.it.K, it.it.V); err != nil {
-            iteratorStop(it)
+    for ; it.It.K != nil; rt.Mapiternext(&it.It) {
+        if err := it.append(t.Key, it.It.K, it.It.V); err != nil {
+            IteratorStop(it)
             return nil, err
         }
     }
@@ -193,7 +194,13 @@
     }
 
     /* load the first pair into iterator */
-    it.it.V = it.at(0).v
-    it.it.K = unsafe.Pointer(&it.at(0).k)
+    it.It.V = it.at(0).v
+    it.It.K = unsafe.Pointer(&it.at(0).k)
     return it, nil
 }
+
+func asText(v unsafe.Pointer) (string, error) {
+    text := 
rt.AssertI2I(rt.UnpackType(vars.EncodingTextMarshalerType), *(*rt.GoIface)(v)) + r, e := (*(*encoding.TextMarshaler)(unsafe.Pointer(&text))).MarshalText() + return rt.Mem2Str(r), e +} diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/alg/opts.go b/vendor/github.com/bytedance/sonic/internal/encoder/alg/opts.go new file mode 100644 index 0000000000000000000000000000000000000000..c19e2de4eec92427da80f54d6286e0f3445f4c34 --- /dev/null +++ b/vendor/github.com/bytedance/sonic/internal/encoder/alg/opts.go @@ -0,0 +1,31 @@ +/** + * Copyright 2024 ByteDance Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package alg + +const ( + BitSortMapKeys = iota + BitEscapeHTML + BitCompactMarshaler + BitNoQuoteTextMarshaler + BitNoNullSliceOrMap + BitValidateString + BitNoValidateJSONMarshaler + BitNoEncoderNewline + BitEncodeNullForInfOrNan + + BitPointerValue = 63 +) diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/alg/primitives.go b/vendor/github.com/bytedance/sonic/internal/encoder/alg/primitives.go new file mode 100644 index 0000000000000000000000000000000000000000..63fa01890de7940d330f2028fb0329ed60c9e796 --- /dev/null +++ b/vendor/github.com/bytedance/sonic/internal/encoder/alg/primitives.go @@ -0,0 +1,95 @@ +/** + * Copyright 2024 ByteDance Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package alg + +import ( + "encoding" + "encoding/json" + + "github.com/bytedance/sonic/internal/encoder/vars" + "github.com/bytedance/sonic/internal/rt" +) + +func Compact(p *[]byte, v []byte) error { + buf := vars.NewBuffer() + err := json.Compact(buf, v) + + /* check for errors */ + if err != nil { + return err + } + + /* add to result */ + v = buf.Bytes() + *p = append(*p, v...) 
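+    // buf.Bytes() was copied into *p above, so the pooled buffer can be recycled safely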
+ + /* return the buffer into pool */ + vars.FreeBuffer(buf) + return nil +} + +func EncodeNil(rb *[]byte) error { + *rb = append(*rb, 'n', 'u', 'l', 'l') + return nil +} + +// func Make_EncodeTypedPointer(computor func(*rt.GoType, ...interface{}) (interface{}, error)) func(*[]byte, *rt.GoType, *unsafe.Pointer, *vars.Stack, uint64) error { +// return func(buf *[]byte, vt *rt.GoType, vp *unsafe.Pointer, sb *vars.Stack, fv uint64) error { +// if vt == nil { +// return EncodeNil(buf) +// } else if fn, err := vars.FindOrCompile(vt, (fv&(1< 0 { + // output buffer + dp := unsafe.Pointer(uintptr(b.Ptr) + uintptr(b.Len)) + dn := b.Cap - b.Len + // call native.Quote, dn is byte count it outputs + opts := uint64(0) + if double { + opts = types.F_DOUBLE_UNQUOTE + } + ret := native.Quote(sp, nb, dp, &dn, opts) + // update *buf length + b.Len += dn + + // no need more output + if ret >= 0 { + break + } + + // double buf size + *b = rt.GrowSlice(typeByte, *b, b.Cap*2) + // ret is the complement of consumed input + ret = ^ret + // update input buffer + nb -= ret + sp = unsafe.Pointer(uintptr(sp) + uintptr(ret)) + } + + runtime.KeepAlive(buf) + runtime.KeepAlive(sp) + if double { + buf = append(buf, `\""`...) + } else { + buf = append(buf, `"`...) + } + + return buf +} + +func HtmlEscape(dst []byte, src []byte) []byte { + var sidx int + + dst = append(dst, src[:0]...) // avoid check nil dst + sbuf := (*rt.GoSlice)(unsafe.Pointer(&src)) + dbuf := (*rt.GoSlice)(unsafe.Pointer(&dst)) + + /* grow dst if it is shorter */ + if cap(dst)-len(dst) < len(src)+types.BufPaddingSize { + cap := len(src)*3/2 + types.BufPaddingSize + *dbuf = rt.GrowSlice(typeByte, *dbuf, cap) + } + + for sidx < sbuf.Len { + sp := rt.Add(sbuf.Ptr, uintptr(sidx)) + dp := rt.Add(dbuf.Ptr, uintptr(dbuf.Len)) + + sn := sbuf.Len - sidx + dn := dbuf.Cap - dbuf.Len + nb := native.HTMLEscape(sp, sn, dp, &dn) + + /* check for errors */ + if dbuf.Len += dn; nb >= 0 { + break + } + + /* not enough space, grow the slice and try again */ + sidx += ^nb + *dbuf = rt.GrowSlice(typeByte, *dbuf, dbuf.Cap*2) + } + return dst +} + +func F64toa(buf []byte, v float64) ([]byte) { + if v == 0 { + return append(buf, '0') + } + buf = rt.GuardSlice2(buf, 64) + ret := native.F64toa((*byte)(rt.IndexByte(buf, len(buf))), v) + if ret > 0 { + return buf[:len(buf)+ret] + } else { + return buf + } +} + +func F32toa(buf []byte, v float32) ([]byte) { + if v == 0 { + return append(buf, '0') + } + buf = rt.GuardSlice2(buf, 64) + ret := native.F32toa((*byte)(rt.IndexByte(buf, len(buf))), v) + if ret > 0 { + return buf[:len(buf)+ret] + } else { + return buf + } +} + +func I64toa(buf []byte, v int64) ([]byte) { + buf = rt.GuardSlice2(buf, 32) + ret := native.I64toa((*byte)(rt.IndexByte(buf, len(buf))), v) + if ret > 0 { + return buf[:len(buf)+ret] + } else { + return buf + } +} + +func U64toa(buf []byte, v uint64) ([]byte) { + buf = rt.GuardSlice2(buf, 32) + ret := native.U64toa((*byte)(rt.IndexByte(buf, len(buf))), v) + if ret > 0 { + return buf[:len(buf)+ret] + } else { + return buf + } +} + diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/alg/spec_compat.go b/vendor/github.com/bytedance/sonic/internal/encoder/alg/spec_compat.go new file mode 100644 index 0000000000000000000000000000000000000000..c15cbf7d80fe68e8377bb142ed1bc384408d7e5f --- /dev/null +++ b/vendor/github.com/bytedance/sonic/internal/encoder/alg/spec_compat.go @@ -0,0 +1,148 @@ +// +build !amd64,!arm64 go1.24 !go1.16 arm64,!go1.20 + +/** + * Copyright 2024 ByteDance Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package alg + +import ( + _ "unsafe" + "unicode/utf8" + "strconv" + "bytes" + "encoding/json" + + "github.com/bytedance/sonic/internal/rt" +) + +// Valid validates json and returns first non-blank character position, +// if it is only one valid json value. +// Otherwise returns invalid character position using start. +// +// Note: it does not check for the invalid UTF-8 characters. +func Valid(data []byte) (ok bool, start int) { + ok = json.Valid(data) + return ok, 0 +} + +var typeByte = rt.UnpackEface(byte(0)).Type + +func Quote(e []byte, s string, double bool) []byte { + if len(s) == 0 { + if double { + return append(e, `"\"\""`...) + } + return append(e, `""`...) + } + + b := e + ss := len(e) + e = append(e, '"') + start := 0 + + for i := 0; i < len(s); { + if b := s[i]; b < utf8.RuneSelf { + if rt.SafeSet[b] { + i++ + continue + } + if start < i { + e = append(e, s[start:i]...) + } + e = append(e, '\\') + switch b { + case '\\', '"': + e = append(e, b) + case '\n': + e = append(e, 'n') + case '\r': + e = append(e, 'r') + case '\t': + e = append(e, 't') + default: + // This encodes bytes < 0x20 except for \t, \n and \r. + // If escapeHTML is set, it also escapes <, >, and & + // because they can lead to security holes when + // user-controlled strings are rendered into JSON + // and served to some browsers. + e = append(e, `u00`...) + e = append(e, rt.Hex[b>>4]) + e = append(e, rt.Hex[b&0xF]) + } + i++ + start = i + continue + } + c, size := utf8.DecodeRuneInString(s[i:]) + // if correct && c == utf8.RuneError && size == 1 { + // if start < i { + // e = append(e, s[start:i]...) + // } + // e = append(e, `\ufffd`...) + // i += size + // start = i + // continue + // } + if c == '\u2028' || c == '\u2029' { + if start < i { + e = append(e, s[start:i]...) + } + e = append(e, `\u202`...) + e = append(e, rt.Hex[c&0xF]) + i += size + start = i + continue + } + i += size + } + + if start < len(s) { + e = append(e, s[start:]...) 
+ } + e = append(e, '"') + + if double { + return strconv.AppendQuote(b, string(e[ss:])) + } else { + return e + } +} + +func HtmlEscape(dst []byte, src []byte) []byte { + buf := bytes.NewBuffer(dst) + json.HTMLEscape(buf, src) + return buf.Bytes() +} + +func F64toa(buf []byte, v float64) ([]byte) { + bs := bytes.NewBuffer(buf) + _ = json.NewEncoder(bs).Encode(v) + return bs.Bytes() +} + +func F32toa(buf []byte, v float32) ([]byte) { + bs := bytes.NewBuffer(buf) + _ = json.NewEncoder(bs).Encode(v) + return bs.Bytes() +} + +func I64toa(buf []byte, v int64) ([]byte) { + return strconv.AppendInt(buf, int64(v), 10) +} + +func U64toa(buf []byte, v uint64) ([]byte) { + return strconv.AppendUint(buf, v, 10) +} diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/assembler_amd64_go116.go b/vendor/github.com/bytedance/sonic/internal/encoder/assembler_amd64_go116.go deleted file mode 100644 index d056259f2b9885bc9e9280ca06da9697396e11c6..0000000000000000000000000000000000000000 --- a/vendor/github.com/bytedance/sonic/internal/encoder/assembler_amd64_go116.go +++ /dev/null @@ -1,1199 +0,0 @@ -// +build go1.15,!go1.17 - -/* - * Copyright 2021 ByteDance Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package encoder - -import ( - `fmt` - `reflect` - `strconv` - `unsafe` - - `github.com/bytedance/sonic/internal/cpu` - `github.com/bytedance/sonic/internal/jit` - `github.com/bytedance/sonic/internal/native/types` - `github.com/twitchyliquid64/golang-asm/obj` - `github.com/twitchyliquid64/golang-asm/obj/x86` - - `github.com/bytedance/sonic/internal/native` - `github.com/bytedance/sonic/internal/rt` -) - -/** Register Allocations - * - * State Registers: - * - * %rbx : stack base - * %rdi : result pointer - * %rsi : result length - * %rdx : result capacity - * %r12 : sp->p - * %r13 : sp->q - * %r14 : sp->x - * %r15 : sp->f - * - * Error Registers: - * - * %r10 : error type register - * %r11 : error pointer register - */ - -/** Function Prototype & Stack Map - * - * func (buf *[]byte, p unsafe.Pointer, sb *_Stack, fv uint64) (err error) - * - * buf : (FP) - * p : 8(FP) - * sb : 16(FP) - * fv : 24(FP) - * err.vt : 32(FP) - * err.vp : 40(FP) - */ - -const ( - _S_cond = iota - _S_init -) - -const ( - _FP_args = 48 // 48 bytes for passing arguments to this function - _FP_fargs = 64 // 64 bytes for passing arguments to other Go functions - _FP_saves = 64 // 64 bytes for saving the registers before CALL instructions - _FP_locals = 24 // 24 bytes for local variables -) - -const ( - _FP_offs = _FP_fargs + _FP_saves + _FP_locals - _FP_size = _FP_offs + 8 // 8 bytes for the parent frame pointer - _FP_base = _FP_size + 8 // 8 bytes for the return address -) - -const ( - _FM_exp32 = 0x7f800000 - _FM_exp64 = 0x7ff0000000000000 -) - -const ( - _IM_null = 0x6c6c756e // 'null' - _IM_true = 0x65757274 // 'true' - _IM_fals = 0x736c6166 // 'fals' ('false' without the 'e') - _IM_open = 0x00225c22 // '"\"∅' - _IM_array = 0x5d5b // '[]' - _IM_object = 0x7d7b // '{}' - _IM_mulv = -0x5555555555555555 
-) - -const ( - _LB_more_space = "_more_space" - _LB_more_space_return = "_more_space_return_" -) - -const ( - _LB_error = "_error" - _LB_error_too_deep = "_error_too_deep" - _LB_error_invalid_number = "_error_invalid_number" - _LB_error_nan_or_infinite = "_error_nan_or_infinite" - _LB_panic = "_panic" -) - -var ( - _AX = jit.Reg("AX") - _CX = jit.Reg("CX") - _DX = jit.Reg("DX") - _DI = jit.Reg("DI") - _SI = jit.Reg("SI") - _BP = jit.Reg("BP") - _SP = jit.Reg("SP") - _R8 = jit.Reg("R8") -) - -var ( - _X0 = jit.Reg("X0") - _Y0 = jit.Reg("Y0") -) - -var ( - _ST = jit.Reg("BX") - _RP = jit.Reg("DI") - _RL = jit.Reg("SI") - _RC = jit.Reg("DX") -) - -var ( - _LR = jit.Reg("R9") - _R10 = jit.Reg("R10") // used for gcWriterBarrier - _ET = jit.Reg("R10") - _EP = jit.Reg("R11") -) - -var ( - _SP_p = jit.Reg("R12") - _SP_q = jit.Reg("R13") - _SP_x = jit.Reg("R14") - _SP_f = jit.Reg("R15") -) - -var ( - _ARG_rb = jit.Ptr(_SP, _FP_base) - _ARG_vp = jit.Ptr(_SP, _FP_base + 8) - _ARG_sb = jit.Ptr(_SP, _FP_base + 16) - _ARG_fv = jit.Ptr(_SP, _FP_base + 24) -) - -var ( - _RET_et = jit.Ptr(_SP, _FP_base + 32) - _RET_ep = jit.Ptr(_SP, _FP_base + 40) -) - -var ( - _VAR_sp = jit.Ptr(_SP, _FP_fargs + _FP_saves) - _VAR_dn = jit.Ptr(_SP, _FP_fargs + _FP_saves + 8) - _VAR_vp = jit.Ptr(_SP, _FP_fargs + _FP_saves + 16) -) - -var ( - _REG_ffi = []obj.Addr{_RP, _RL, _RC} - _REG_enc = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _RL} - _REG_jsr = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _LR} - _REG_all = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _RP, _RL, _RC} -) - -type _Assembler struct { - jit.BaseAssembler - p _Program - x int - name string -} - -func newAssembler(p _Program) *_Assembler { - return new(_Assembler).Init(p) -} - -/** Assembler Interface **/ -func (self *_Assembler) Load() _Encoder { - return ptoenc(self.BaseAssembler.Load("encode_"+self.name, _FP_size, _FP_args, argPtrs, localPtrs)) -} - -func (self *_Assembler) Init(p _Program) *_Assembler { - self.p = p - self.BaseAssembler.Init(self.compile) - return self -} - -func (self *_Assembler) compile() { - self.prologue() - self.instrs() - self.epilogue() - self.builtins() -} - -/** Assembler Stages **/ - -var _OpFuncTab = [256]func(*_Assembler, *_Instr) { - _OP_null : (*_Assembler)._asm_OP_null, - _OP_empty_arr : (*_Assembler)._asm_OP_empty_arr, - _OP_empty_obj : (*_Assembler)._asm_OP_empty_obj, - _OP_bool : (*_Assembler)._asm_OP_bool, - _OP_i8 : (*_Assembler)._asm_OP_i8, - _OP_i16 : (*_Assembler)._asm_OP_i16, - _OP_i32 : (*_Assembler)._asm_OP_i32, - _OP_i64 : (*_Assembler)._asm_OP_i64, - _OP_u8 : (*_Assembler)._asm_OP_u8, - _OP_u16 : (*_Assembler)._asm_OP_u16, - _OP_u32 : (*_Assembler)._asm_OP_u32, - _OP_u64 : (*_Assembler)._asm_OP_u64, - _OP_f32 : (*_Assembler)._asm_OP_f32, - _OP_f64 : (*_Assembler)._asm_OP_f64, - _OP_str : (*_Assembler)._asm_OP_str, - _OP_bin : (*_Assembler)._asm_OP_bin, - _OP_quote : (*_Assembler)._asm_OP_quote, - _OP_number : (*_Assembler)._asm_OP_number, - _OP_eface : (*_Assembler)._asm_OP_eface, - _OP_iface : (*_Assembler)._asm_OP_iface, - _OP_byte : (*_Assembler)._asm_OP_byte, - _OP_text : (*_Assembler)._asm_OP_text, - _OP_deref : (*_Assembler)._asm_OP_deref, - _OP_index : (*_Assembler)._asm_OP_index, - _OP_load : (*_Assembler)._asm_OP_load, - _OP_save : (*_Assembler)._asm_OP_save, - _OP_drop : (*_Assembler)._asm_OP_drop, - _OP_drop_2 : (*_Assembler)._asm_OP_drop_2, - _OP_recurse : (*_Assembler)._asm_OP_recurse, - _OP_is_nil : (*_Assembler)._asm_OP_is_nil, - _OP_is_nil_p1 : (*_Assembler)._asm_OP_is_nil_p1, - _OP_is_zero_1 : 
(*_Assembler)._asm_OP_is_zero_1, - _OP_is_zero_2 : (*_Assembler)._asm_OP_is_zero_2, - _OP_is_zero_4 : (*_Assembler)._asm_OP_is_zero_4, - _OP_is_zero_8 : (*_Assembler)._asm_OP_is_zero_8, - _OP_is_zero_map : (*_Assembler)._asm_OP_is_zero_map, - _OP_goto : (*_Assembler)._asm_OP_goto, - _OP_map_iter : (*_Assembler)._asm_OP_map_iter, - _OP_map_stop : (*_Assembler)._asm_OP_map_stop, - _OP_map_check_key : (*_Assembler)._asm_OP_map_check_key, - _OP_map_write_key : (*_Assembler)._asm_OP_map_write_key, - _OP_map_value_next : (*_Assembler)._asm_OP_map_value_next, - _OP_slice_len : (*_Assembler)._asm_OP_slice_len, - _OP_slice_next : (*_Assembler)._asm_OP_slice_next, - _OP_marshal : (*_Assembler)._asm_OP_marshal, - _OP_marshal_p : (*_Assembler)._asm_OP_marshal_p, - _OP_marshal_text : (*_Assembler)._asm_OP_marshal_text, - _OP_marshal_text_p : (*_Assembler)._asm_OP_marshal_text_p, - _OP_cond_set : (*_Assembler)._asm_OP_cond_set, - _OP_cond_testc : (*_Assembler)._asm_OP_cond_testc, -} - -func (self *_Assembler) instr(v *_Instr) { - if fn := _OpFuncTab[v.op()]; fn != nil { - fn(self, v) - } else { - panic(fmt.Sprintf("invalid opcode: %d", v.op())) - } -} - -func (self *_Assembler) instrs() { - for i, v := range self.p { - self.Mark(i) - self.instr(&v) - self.debug_instr(i, &v) - } -} - -func (self *_Assembler) builtins() { - self.more_space() - self.error_too_deep() - self.error_invalid_number() - self.error_nan_or_infinite() - self.go_panic() -} - -func (self *_Assembler) epilogue() { - self.Mark(len(self.p)) - self.Emit("XORL", _ET, _ET) - self.Emit("XORL", _EP, _EP) - self.Link(_LB_error) - self.Emit("MOVQ", _ARG_rb, _AX) // MOVQ rb<>+0(FP), AX - self.Emit("MOVQ", _RL, jit.Ptr(_AX, 8)) // MOVQ RL, 8(AX) - self.Emit("MOVQ", _ET, _RET_et) // MOVQ ET, et<>+24(FP) - self.Emit("MOVQ", _EP, _RET_ep) // MOVQ EP, ep<>+32(FP) - self.Emit("MOVQ", jit.Ptr(_SP, _FP_offs), _BP) // MOVQ _FP_offs(SP), BP - self.Emit("ADDQ", jit.Imm(_FP_size), _SP) // ADDQ $_FP_size, SP - self.Emit("RET") // RET -} - -func (self *_Assembler) prologue() { - self.Emit("SUBQ", jit.Imm(_FP_size), _SP) // SUBQ $_FP_size, SP - self.Emit("MOVQ", _BP, jit.Ptr(_SP, _FP_offs)) // MOVQ BP, _FP_offs(SP) - self.Emit("LEAQ", jit.Ptr(_SP, _FP_offs), _BP) // LEAQ _FP_offs(SP), BP - self.load_buffer() // LOAD {buf} - self.Emit("MOVQ", _ARG_vp, _SP_p) // MOVQ vp<>+8(FP), SP.p - self.Emit("MOVQ", _ARG_sb, _ST) // MOVQ sb<>+16(FP), ST - self.Emit("XORL", _SP_x, _SP_x) // XORL SP.x, SP.x - self.Emit("XORL", _SP_f, _SP_f) // XORL SP.f, SP.f - self.Emit("XORL", _SP_q, _SP_q) // XORL SP.q, SP.q -} - -/** Assembler Inline Functions **/ - -func (self *_Assembler) xsave(reg ...obj.Addr) { - for i, v := range reg { - if i > _FP_saves / 8 - 1 { - panic("too many registers to save") - } else { - self.Emit("MOVQ", v, jit.Ptr(_SP, _FP_fargs + int64(i) * 8)) - } - } -} - -func (self *_Assembler) xload(reg ...obj.Addr) { - for i, v := range reg { - if i > _FP_saves / 8 - 1 { - panic("too many registers to load") - } else { - self.Emit("MOVQ", jit.Ptr(_SP, _FP_fargs + int64(i) * 8), v) - } - } -} - -func (self *_Assembler) rbuf_di() { - if _RP.Reg != x86.REG_DI { - panic("register allocation messed up: RP != DI") - } else { - self.Emit("ADDQ", _RL, _RP) - } -} - -func (self *_Assembler) store_int(nd int, fn obj.Addr, ins string) { - self.check_size(nd) - self.save_c() // SAVE $C_regs - self.rbuf_di() // MOVQ RP, DI - self.Emit(ins, jit.Ptr(_SP_p, 0), _SI) // $ins (SP.p), SI - self.call_c(fn) // CALL_C $fn - self.Emit("ADDQ", _AX, _RL) // ADDQ AX, RL -} - -func (self 
*_Assembler) store_str(s string) { - i := 0 - m := rt.Str2Mem(s) - - /* 8-byte stores */ - for i <= len(m) - 8 { - self.Emit("MOVQ", jit.Imm(rt.Get64(m[i:])), _AX) // MOVQ $s[i:], AX - self.Emit("MOVQ", _AX, jit.Sib(_RP, _RL, 1, int64(i))) // MOVQ AX, i(RP)(RL) - i += 8 - } - - /* 4-byte stores */ - if i <= len(m) - 4 { - self.Emit("MOVL", jit.Imm(int64(rt.Get32(m[i:]))), jit.Sib(_RP, _RL, 1, int64(i))) // MOVL $s[i:], i(RP)(RL) - i += 4 - } - - /* 2-byte stores */ - if i <= len(m) - 2 { - self.Emit("MOVW", jit.Imm(int64(rt.Get16(m[i:]))), jit.Sib(_RP, _RL, 1, int64(i))) // MOVW $s[i:], i(RP)(RL) - i += 2 - } - - /* last byte */ - if i < len(m) { - self.Emit("MOVB", jit.Imm(int64(m[i])), jit.Sib(_RP, _RL, 1, int64(i))) // MOVB $s[i:], i(RP)(RL) - } -} - -func (self *_Assembler) check_size(n int) { - self.check_size_rl(jit.Ptr(_RL, int64(n))) -} - -func (self *_Assembler) check_size_r(r obj.Addr, d int) { - self.check_size_rl(jit.Sib(_RL, r, 1, int64(d))) -} - -func (self *_Assembler) check_size_rl(v obj.Addr) { - idx := self.x - key := _LB_more_space_return + strconv.Itoa(idx) - - /* the following code relies on LR == R9 to work */ - if _LR.Reg != x86.REG_R9 { - panic("register allocation messed up: LR != R9") - } - - /* check for buffer capacity */ - self.x++ - self.Emit("LEAQ", v, _AX) // LEAQ $v, AX - self.Emit("CMPQ", _AX, _RC) // CMPQ AX, RC - self.Sjmp("JBE" , key) // JBE _more_space_return_{n} - self.slice_grow_ax(key) // GROW $key - self.Link(key) // _more_space_return_{n}: -} - -func (self *_Assembler) slice_grow_ax(ret string) { - self.Byte(0x4c, 0x8d, 0x0d) // LEAQ ?(PC), R9 - self.Sref(ret, 4) // .... &ret - self.Sjmp("JMP" , _LB_more_space) // JMP _more_space -} - -/** State Stack Helpers **/ - -const ( - _StateSize = int64(unsafe.Sizeof(_State{})) - _StackLimit = _MaxStack * _StateSize -) - -func (self *_Assembler) save_state() { - self.Emit("MOVQ", jit.Ptr(_ST, 0), _CX) // MOVQ (ST), CX - self.Emit("LEAQ", jit.Ptr(_CX, _StateSize), _R8) // LEAQ _StateSize(CX), R8 - self.Emit("CMPQ", _R8, jit.Imm(_StackLimit)) // CMPQ R8, $_StackLimit - self.Sjmp("JAE" , _LB_error_too_deep) // JA _error_too_deep - self.Emit("MOVQ", _SP_x, jit.Sib(_ST, _CX, 1, 8)) // MOVQ SP.x, 8(ST)(CX) - self.Emit("MOVQ", _SP_f, jit.Sib(_ST, _CX, 1, 16)) // MOVQ SP.f, 16(ST)(CX) - self.WriteRecNotAX(0, _SP_p, jit.Sib(_ST, _CX, 1, 24)) // MOVQ SP.p, 24(ST)(CX) - self.WriteRecNotAX(1, _SP_q, jit.Sib(_ST, _CX, 1, 32)) // MOVQ SP.q, 32(ST)(CX) - self.Emit("MOVQ", _R8, jit.Ptr(_ST, 0)) // MOVQ R8, (ST) -} - -func (self *_Assembler) drop_state(decr int64) { - self.Emit("MOVQ" , jit.Ptr(_ST, 0), _AX) // MOVQ (ST), AX - self.Emit("SUBQ" , jit.Imm(decr), _AX) // SUBQ $decr, AX - self.Emit("MOVQ" , _AX, jit.Ptr(_ST, 0)) // MOVQ AX, (ST) - self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 8), _SP_x) // MOVQ 8(ST)(AX), SP.x - self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 16), _SP_f) // MOVQ 16(ST)(AX), SP.f - self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 24), _SP_p) // MOVQ 24(ST)(AX), SP.p - self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 32), _SP_q) // MOVQ 32(ST)(AX), SP.q - self.Emit("PXOR" , _X0, _X0) // PXOR X0, X0 - self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 8)) // MOVOU X0, 8(ST)(AX) - self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 24)) // MOVOU X0, 24(ST)(AX) -} - -/** Buffer Helpers **/ - -func (self *_Assembler) add_char(ch byte) { - self.Emit("MOVB", jit.Imm(int64(ch)), jit.Sib(_RP, _RL, 1, 0)) // MOVB $ch, (RP)(RL) - self.Emit("ADDQ", jit.Imm(1), _RL) // ADDQ $1, RL -} - -func (self *_Assembler) add_long(ch uint32, n int64) { - 
self.Emit("MOVL", jit.Imm(int64(ch)), jit.Sib(_RP, _RL, 1, 0)) // MOVL $ch, (RP)(RL) - self.Emit("ADDQ", jit.Imm(n), _RL) // ADDQ $n, RL -} - -func (self *_Assembler) add_text(ss string) { - self.store_str(ss) // TEXT $ss - self.Emit("ADDQ", jit.Imm(int64(len(ss))), _RL) // ADDQ ${len(ss)}, RL -} - -func (self *_Assembler) prep_buffer() { - self.Emit("MOVQ", _ARG_rb, _AX) // MOVQ rb<>+0(FP), AX - self.Emit("MOVQ", _RL, jit.Ptr(_AX, 8)) // MOVQ RL, 8(AX) - self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP) -} - -func (self *_Assembler) prep_buffer_c() { - self.Emit("MOVQ", _ARG_rb, _DI) // MOVQ rb<>+0(FP), DI - self.Emit("MOVQ", _RL, jit.Ptr(_DI, 8)) // MOVQ RL, 8(DI) -} - -func (self *_Assembler) save_buffer() { - self.Emit("MOVQ", _ARG_rb, _CX) // MOVQ rb<>+0(FP), CX - self.Emit("MOVQ", _RP, jit.Ptr(_CX, 0)) // MOVQ RP, (CX) - self.Emit("MOVQ", _RL, jit.Ptr(_CX, 8)) // MOVQ RL, 8(CX) - self.Emit("MOVQ", _RC, jit.Ptr(_CX, 16)) // MOVQ RC, 16(CX) -} - -func (self *_Assembler) load_buffer() { - self.Emit("MOVQ", _ARG_rb, _AX) // MOVQ rb<>+0(FP), AX - self.Emit("MOVQ", jit.Ptr(_AX, 0), _RP) // MOVQ (AX), RP - self.Emit("MOVQ", jit.Ptr(_AX, 8), _RL) // MOVQ 8(AX), RL - self.Emit("MOVQ", jit.Ptr(_AX, 16), _RC) // MOVQ 16(AX), RC -} - -/** Function Interface Helpers **/ - -func (self *_Assembler) call(pc obj.Addr) { - self.Emit("MOVQ", pc, _AX) // MOVQ $pc, AX - self.Rjmp("CALL", _AX) // CALL AX -} - -func (self *_Assembler) save_c() { - self.xsave(_REG_ffi...) // SAVE $REG_ffi -} - -func (self *_Assembler) call_c(pc obj.Addr) { - self.call(pc) // CALL $pc - self.xload(_REG_ffi...) // LOAD $REG_ffi -} - -func (self *_Assembler) call_go(pc obj.Addr) { - self.xsave(_REG_all...) // SAVE $REG_all - self.call(pc) // CALL $pc - self.xload(_REG_all...) // LOAD $REG_all -} - -func (self *_Assembler) call_encoder(pc obj.Addr) { - self.xsave(_REG_enc...) // SAVE $REG_enc - self.call(pc) // CALL $pc - self.xload(_REG_enc...) 
// LOAD $REG_enc - self.load_buffer() // LOAD {buf} -} - -func (self *_Assembler) call_marshaler(fn obj.Addr, it *rt.GoType, vt reflect.Type) { - switch vt.Kind() { - case reflect.Interface : self.call_marshaler_i(fn, it) - case reflect.Ptr, reflect.Map: self.call_marshaler_v(fn, it, vt, true) - // struct/array of 1 direct iface type can be direct - default : self.call_marshaler_v(fn, it, vt, !rt.UnpackType(vt).Indirect()) - } -} - -func (self *_Assembler) call_marshaler_i(fn obj.Addr, it *rt.GoType) { - self.Emit("MOVQ" , jit.Gtype(it), _AX) // MOVQ $it, AX - self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP) - self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX) // MOVQ (SP.p), AX - self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _CX) // MOVQ 8(SP.p), CX - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JZ" , "_null_{n}") // JZ _null_{n} - self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 8)) // MOVQ AX, 8(SP) - self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 16)) // MOVQ CX, 16(SP) - self.call_go(_F_assertI2I) // CALL_GO assertI2I - self.prep_buffer() // MOVE {buf}, (SP) - self.Emit("MOVOU", jit.Ptr(_SP, 24), _X0) // MOVOU 24(SP), X0 - self.Emit("MOVOU", _X0, jit.Ptr(_SP, 8)) // MOVOU X0, 8(SP) - self.Emit("MOVQ", _ARG_fv, _CX) // MOVQ ARG.fv, CX - self.Emit("MOVQ", _CX, jit.Ptr(_SP, 24)) // MOVQ CX, 24(SP) - self.call_encoder(fn) // CALL $fn - self.Emit("MOVQ" , jit.Ptr(_SP, 32), _ET) // MOVQ 32(SP), ET - self.Emit("MOVQ" , jit.Ptr(_SP, 40), _EP) // MOVQ 40(SP), EP - self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET - self.Sjmp("JNZ" , _LB_error) // JNZ _error - self.Sjmp("JMP" , "_done_{n}") // JMP _done_{n} - self.Link("_null_{n}") // _null_{n}: - self.check_size(4) // SIZE $4 - self.Emit("MOVL", jit.Imm(_IM_null), jit.Sib(_RP, _RL, 1, 0)) // MOVL $'null', (RP)(RL*1) - self.Emit("ADDQ", jit.Imm(4), _RL) // ADDQ $4, RL - self.Link("_done_{n}") // _done_{n}: -} - -func (self *_Assembler) call_marshaler_v(fn obj.Addr, it *rt.GoType, vt reflect.Type, deref bool) { - self.prep_buffer() // MOVE {buf}, (SP) - self.Emit("MOVQ", jit.Itab(it, vt), _AX) // MOVQ $(itab(it, vt)), AX - self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8)) // MOVQ AX, 8(SP) - - /* dereference the pointer if needed */ - if !deref { - self.Emit("MOVQ", _SP_p, jit.Ptr(_SP, 16)) // MOVQ SP.p, 16(SP) - } else { - self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _AX) // MOVQ (SP.p), AX - self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16)) // MOVQ AX, 16(SP) - } - - /* call the encoder, and perform error checks */ - self.Emit("MOVQ", _ARG_fv, _CX) // MOVQ ARG.fv, CX - self.Emit("MOVQ", _CX, jit.Ptr(_SP, 24)) // MOVQ CX, 24(SP) - self.call_encoder(fn) // CALL $fn - self.Emit("MOVQ" , jit.Ptr(_SP, 32), _ET) // MOVQ 32(SP), ET - self.Emit("MOVQ" , jit.Ptr(_SP, 40), _EP) // MOVQ 40(SP), EP - self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET - self.Sjmp("JNZ" , _LB_error) // JNZ _error -} - -/** Builtin: _more_space **/ - -var ( - _T_byte = jit.Type(byteType) - _F_growslice = jit.Func(growslice) -) - -func (self *_Assembler) more_space() { - self.Link(_LB_more_space) - self.Emit("MOVQ", _T_byte, jit.Ptr(_SP, 0)) // MOVQ $_T_byte, (SP) - self.Emit("MOVQ", _RP, jit.Ptr(_SP, 8)) // MOVQ RP, 8(SP) - self.Emit("MOVQ", _RL, jit.Ptr(_SP, 16)) // MOVQ RL, 16(SP) - self.Emit("MOVQ", _RC, jit.Ptr(_SP, 24)) // MOVQ RC, 24(SP) - self.Emit("MOVQ", _AX, jit.Ptr(_SP, 32)) // MOVQ AX, 32(SP) - self.xsave(_REG_jsr...) // SAVE $REG_jsr - self.call(_F_growslice) // CALL $pc - self.xload(_REG_jsr...) 
// LOAD $REG_jsr - self.Emit("MOVQ", jit.Ptr(_SP, 40), _RP) // MOVQ 40(SP), RP - self.Emit("MOVQ", jit.Ptr(_SP, 48), _RL) // MOVQ 48(SP), RL - self.Emit("MOVQ", jit.Ptr(_SP, 56), _RC) // MOVQ 56(SP), RC - self.save_buffer() // SAVE {buf} - self.Rjmp("JMP" , _LR) // JMP LR -} - -/** Builtin Errors **/ - -var ( - _V_ERR_too_deep = jit.Imm(int64(uintptr(unsafe.Pointer(_ERR_too_deep)))) - _V_ERR_nan_or_infinite = jit.Imm(int64(uintptr(unsafe.Pointer(_ERR_nan_or_infinite)))) - _I_json_UnsupportedValueError = jit.Itab(rt.UnpackType(errorType), jsonUnsupportedValueType) -) - -func (self *_Assembler) error_too_deep() { - self.Link(_LB_error_too_deep) - self.Emit("MOVQ", _V_ERR_too_deep, _EP) // MOVQ $_V_ERR_too_deep, EP - self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET) // MOVQ $_I_json_UnsupportedValuError, ET - self.Sjmp("JMP" , _LB_error) // JMP _error -} - -func (self *_Assembler) error_invalid_number() { - self.Link(_LB_error_invalid_number) - self.call_go(_F_error_number) // CALL_GO error_number - self.Emit("MOVQ", jit.Ptr(_SP, 16), _ET) // MOVQ 16(SP), ET - self.Emit("MOVQ", jit.Ptr(_SP, 24), _EP) // MOVQ 24(SP), EP - self.Sjmp("JMP" , _LB_error) // JMP _error -} - -func (self *_Assembler) error_nan_or_infinite() { - self.Link(_LB_error_nan_or_infinite) - self.Emit("MOVQ", _V_ERR_nan_or_infinite, _EP) // MOVQ $_V_ERR_nan_or_infinite, EP - self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET) // MOVQ $_I_json_UnsupportedValuError, ET - self.Sjmp("JMP" , _LB_error) // JMP _error -} - -/** String Encoding Routine **/ - -var ( - _F_quote = jit.Imm(int64(native.S_quote)) - _F_panic = jit.Func(goPanic) -) - -func (self *_Assembler) go_panic() { - self.Link(_LB_panic) - self.Emit("MOVQ", _SP_p, jit.Ptr(_SP, 8)) - self.call_go(_F_panic) -} - -func (self *_Assembler) encode_string(doubleQuote bool) { - self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _AX) // MOVQ 8(SP.p), AX - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JZ" , "_str_empty_{n}") // JZ _str_empty_{n} - self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0)) - self.Sjmp("JNE" , "_str_next_{n}") - self.Emit("MOVQ", jit.Imm(int64(panicNilPointerOfNonEmptyString)), jit.Ptr(_SP, 0)) - self.Sjmp("JMP", _LB_panic) - self.Link("_str_next_{n}") - - /* openning quote, check for double quote */ - if !doubleQuote { - self.check_size_r(_AX, 2) // SIZE $2 - self.add_char('"') // CHAR $'"' - } else { - self.check_size_r(_AX, 6) // SIZE $6 - self.add_long(_IM_open, 3) // TEXT $`"\"` - } - - /* quoting loop */ - self.Emit("XORL", _AX, _AX) // XORL AX, AX - self.Emit("MOVQ", _AX, _VAR_sp) // MOVQ AX, sp - self.Link("_str_loop_{n}") // _str_loop_{n}: - self.save_c() // SAVE $REG_ffi - - /* load the output buffer first, and then input buffer, - * because the parameter registers collide with RP / RL / RC */ - self.Emit("MOVQ", _RC, _CX) // MOVQ RC, CX - self.Emit("SUBQ", _RL, _CX) // SUBQ RL, CX - self.Emit("MOVQ", _CX, _VAR_dn) // MOVQ CX, dn - self.Emit("LEAQ", jit.Sib(_RP, _RL, 1, 0), _DX) // LEAQ (RP)(RL), DX - self.Emit("LEAQ", _VAR_dn, _CX) // LEAQ dn, CX - self.Emit("MOVQ", _VAR_sp, _AX) // MOVQ sp, AX - self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _DI) // MOVQ (SP.p), DI - self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _SI) // MOVQ 8(SP.p), SI - self.Emit("ADDQ", _AX, _DI) // ADDQ AX, DI - self.Emit("SUBQ", _AX, _SI) // SUBQ AX, SI - - /* set the flags based on `doubleQuote` */ - if !doubleQuote { - self.Emit("XORL", _R8, _R8) // XORL R8, R8 - } else { - self.Emit("MOVL", jit.Imm(types.F_DOUBLE_UNQUOTE), _R8) // MOVL ${types.F_DOUBLE_UNQUOTE}, R8 - } - - /* call 
the native quoter */ - self.call_c(_F_quote) // CALL quote - self.Emit("ADDQ" , _VAR_dn, _RL) // ADDQ dn, RL - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JS" , "_str_space_{n}") // JS _str_space_{n} - - /* close the string, check for double quote */ - if !doubleQuote { - self.check_size(1) // SIZE $1 - self.add_char('"') // CHAR $'"' - self.Sjmp("JMP", "_str_end_{n}") // JMP _str_end_{n} - } else { - self.check_size(3) // SIZE $3 - self.add_text("\\\"\"") // TEXT $'\""' - self.Sjmp("JMP", "_str_end_{n}") // JMP _str_end_{n} - } - - /* not enough space to contain the quoted string */ - self.Link("_str_space_{n}") // _str_space_{n}: - self.Emit("NOTQ", _AX) // NOTQ AX - self.Emit("ADDQ", _AX, _VAR_sp) // ADDQ AX, sp - self.Emit("LEAQ", jit.Sib(_RC, _RC, 1, 0), _AX) // LEAQ (RC)(RC), AX - self.slice_grow_ax("_str_loop_{n}") // GROW _str_loop_{n} - - /* empty string, check for double quote */ - if !doubleQuote { - self.Link("_str_empty_{n}") // _str_empty_{n}: - self.check_size(2) // SIZE $2 - self.add_text("\"\"") // TEXT $'""' - self.Link("_str_end_{n}") // _str_end_{n}: - } else { - self.Link("_str_empty_{n}") // _str_empty_{n}: - self.check_size(6) // SIZE $6 - self.add_text("\"\\\"\\\"\"") // TEXT $'"\"\""' - self.Link("_str_end_{n}") // _str_end_{n}: - } -} - -/** OpCode Assembler Functions **/ - -var ( - _T_json_Marshaler = rt.UnpackType(jsonMarshalerType) - _T_encoding_TextMarshaler = rt.UnpackType(encodingTextMarshalerType) -) - -var ( - _F_f64toa = jit.Imm(int64(native.S_f64toa)) - _F_f32toa = jit.Imm(int64(native.S_f32toa)) - _F_i64toa = jit.Imm(int64(native.S_i64toa)) - _F_u64toa = jit.Imm(int64(native.S_u64toa)) - _F_b64encode = jit.Imm(int64(_subr__b64encode)) -) - -var ( - _F_memmove = jit.Func(memmove) - _F_error_number = jit.Func(error_number) - _F_isValidNumber = jit.Func(isValidNumber) -) - -var ( - _F_iteratorStop = jit.Func(iteratorStop) - _F_iteratorNext = jit.Func(iteratorNext) - _F_iteratorStart = jit.Func(iteratorStart) -) - -var ( - _F_encodeTypedPointer obj.Addr - _F_encodeJsonMarshaler obj.Addr - _F_encodeTextMarshaler obj.Addr -) - -const ( - _MODE_AVX2 = 1 << 2 -) - -func init() { - _F_encodeTypedPointer = jit.Func(encodeTypedPointer) - _F_encodeJsonMarshaler = jit.Func(encodeJsonMarshaler) - _F_encodeTextMarshaler = jit.Func(encodeTextMarshaler) -} - -func (self *_Assembler) _asm_OP_null(_ *_Instr) { - self.check_size(4) - self.Emit("MOVL", jit.Imm(_IM_null), jit.Sib(_RP, _RL, 1, 0)) // MOVL $'null', (RP)(RL*1) - self.Emit("ADDQ", jit.Imm(4), _RL) // ADDQ $4, RL -} - -func (self *_Assembler) _asm_OP_empty_arr(_ *_Instr) { - self.Emit("BTQ", jit.Imm(int64(bitNoNullSliceOrMap)), _ARG_fv) - self.Sjmp("JC", "_empty_arr_{n}") - self._asm_OP_null(nil) - self.Sjmp("JMP", "_empty_arr_end_{n}") - self.Link("_empty_arr_{n}") - self.check_size(2) - self.Emit("MOVW", jit.Imm(_IM_array), jit.Sib(_RP, _RL, 1, 0)) - self.Emit("ADDQ", jit.Imm(2), _RL) - self.Link("_empty_arr_end_{n}") -} - -func (self *_Assembler) _asm_OP_empty_obj(_ *_Instr) { - self.Emit("BTQ", jit.Imm(int64(bitNoNullSliceOrMap)), _ARG_fv) - self.Sjmp("JC", "_empty_obj_{n}") - self._asm_OP_null(nil) - self.Sjmp("JMP", "_empty_obj_end_{n}") - self.Link("_empty_obj_{n}") - self.check_size(2) - self.Emit("MOVW", jit.Imm(_IM_object), jit.Sib(_RP, _RL, 1, 0)) - self.Emit("ADDQ", jit.Imm(2), _RL) - self.Link("_empty_obj_end_{n}") -} - -func (self *_Assembler) _asm_OP_bool(_ *_Instr) { - self.Emit("CMPB", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPB (SP.p), $0 - self.Sjmp("JE" , "_false_{n}") // JE 
_false_{n} - self.check_size(4) // SIZE $4 - self.Emit("MOVL", jit.Imm(_IM_true), jit.Sib(_RP, _RL, 1, 0)) // MOVL $'true', (RP)(RL*1) - self.Emit("ADDQ", jit.Imm(4), _RL) // ADDQ $4, RL - self.Sjmp("JMP" , "_end_{n}") // JMP _end_{n} - self.Link("_false_{n}") // _false_{n}: - self.check_size(5) // SIZE $5 - self.Emit("MOVL", jit.Imm(_IM_fals), jit.Sib(_RP, _RL, 1, 0)) // MOVL $'fals', (RP)(RL*1) - self.Emit("MOVB", jit.Imm('e'), jit.Sib(_RP, _RL, 1, 4)) // MOVB $'e', 4(RP)(RL*1) - self.Emit("ADDQ", jit.Imm(5), _RL) // ADDQ $5, RL - self.Link("_end_{n}") // _end_{n}: -} - -func (self *_Assembler) _asm_OP_i8(_ *_Instr) { - self.store_int(4, _F_i64toa, "MOVBQSX") -} - -func (self *_Assembler) _asm_OP_i16(_ *_Instr) { - self.store_int(6, _F_i64toa, "MOVWQSX") -} - -func (self *_Assembler) _asm_OP_i32(_ *_Instr) { - self.store_int(17, _F_i64toa, "MOVLQSX") -} - -func (self *_Assembler) _asm_OP_i64(_ *_Instr) { - self.store_int(21, _F_i64toa, "MOVQ") -} - -func (self *_Assembler) _asm_OP_u8(_ *_Instr) { - self.store_int(3, _F_u64toa, "MOVBQZX") -} - -func (self *_Assembler) _asm_OP_u16(_ *_Instr) { - self.store_int(5, _F_u64toa, "MOVWQZX") -} - -func (self *_Assembler) _asm_OP_u32(_ *_Instr) { - self.store_int(16, _F_u64toa, "MOVLQZX") -} - -func (self *_Assembler) _asm_OP_u64(_ *_Instr) { - self.store_int(20, _F_u64toa, "MOVQ") -} - -func (self *_Assembler) _asm_OP_f32(_ *_Instr) { - self.check_size(32) - self.Emit("MOVL" , jit.Ptr(_SP_p, 0), _AX) // MOVL (SP.p), AX - self.Emit("ANDL" , jit.Imm(_FM_exp32), _AX) // ANDL $_FM_exp32, AX - self.Emit("XORL" , jit.Imm(_FM_exp32), _AX) // XORL $_FM_exp32, AX - self.Sjmp("JZ" , _LB_error_nan_or_infinite) // JZ _error_nan_or_infinite - self.save_c() // SAVE $C_regs - self.rbuf_di() // MOVQ RP, DI - self.Emit("MOVSS" , jit.Ptr(_SP_p, 0), _X0) // MOVSS (SP.p), X0 - self.call_c(_F_f32toa) // CALL_C f64toa - self.Emit("ADDQ" , _AX, _RL) // ADDQ AX, RL -} - -func (self *_Assembler) _asm_OP_f64(_ *_Instr) { - self.check_size(32) - self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX) // MOVQ (SP.p), AX - self.Emit("MOVQ" , jit.Imm(_FM_exp64), _CX) // MOVQ $_FM_exp64, CX - self.Emit("ANDQ" , _CX, _AX) // ANDQ CX, AX - self.Emit("XORQ" , _CX, _AX) // XORQ CX, AX - self.Sjmp("JZ" , _LB_error_nan_or_infinite) // JZ _error_nan_or_infinite - self.save_c() // SAVE $C_regs - self.rbuf_di() // MOVQ RP, DI - self.Emit("MOVSD" , jit.Ptr(_SP_p, 0), _X0) // MOVSD (SP.p), X0 - self.call_c(_F_f64toa) // CALL_C f64toa - self.Emit("ADDQ" , _AX, _RL) // ADDQ AX, RL -} - -func (self *_Assembler) _asm_OP_str(_ *_Instr) { - self.encode_string(false) -} - -func (self *_Assembler) _asm_OP_bin(_ *_Instr) { - self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _AX) // MOVQ 8(SP.p), AX - self.Emit("ADDQ", jit.Imm(2), _AX) // ADDQ $2, AX - self.Emit("MOVQ", jit.Imm(_IM_mulv), _CX) // MOVQ $_MF_mulv, CX - self.Emit("MOVQ", _DX, _R8) // MOVQ DX, R8 - self.From("MULQ", _CX) // MULQ CX - self.Emit("LEAQ", jit.Sib(_DX, _DX, 1, 1), _AX) // LEAQ 1(DX)(DX), AX - self.Emit("ORQ" , jit.Imm(2), _AX) // ORQ $2, AX - self.Emit("MOVQ", _R8, _DX) // MOVQ R8, DX - self.check_size_r(_AX, 0) // SIZE AX - self.add_char('"') // CHAR $'"' - self.save_c() // SAVE $REG_ffi - self.prep_buffer_c() // MOVE {buf}, DI - self.Emit("MOVQ", _SP_p, _SI) // MOVQ SP.p, SI - - /* check for AVX2 support */ - if !cpu.HasAVX2 { - self.Emit("XORL", _DX, _DX) // XORL DX, DX - } else { - self.Emit("MOVL", jit.Imm(_MODE_AVX2), _DX) // MOVL $_MODE_AVX2, DX - } - - /* call the encoder */ - self.call_c(_F_b64encode) // CALL b64encode - self.load_buffer() 
// LOAD {buf} - self.add_char('"') // CHAR $'"' -} - -func (self *_Assembler) _asm_OP_quote(_ *_Instr) { - self.encode_string(true) -} - -func (self *_Assembler) _asm_OP_number(_ *_Instr) { - self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _CX) // MOVQ (SP.p), CX - self.Emit("TESTQ", _CX, _CX) // TESTQ CX, CX - self.Sjmp("JZ" , "_empty_{n}") // JZ _empty_{n} - self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX) // MOVQ (SP.p), AX - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JNZ" , "_number_next_{n}") - self.Emit("MOVQ", jit.Imm(int64(panicNilPointerOfNonEmptyString)), jit.Ptr(_SP, 0)) - self.Sjmp("JMP", _LB_panic) - self.Link("_number_next_{n}") - self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP) - self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 8)) // MOVQ CX, 8(SP) - self.call_go(_F_isValidNumber) // CALL_GO isValidNumber - self.Emit("CMPB" , jit.Ptr(_SP, 16), jit.Imm(0)) // CMPB 16(SP), $0 - self.Sjmp("JE" , _LB_error_invalid_number) // JE _error_invalid_number - self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _AX) // MOVQ 8(SP.p), AX - self.check_size_r(_AX, 0) // SIZE AX - self.Emit("LEAQ" , jit.Sib(_RP, _RL, 1, 0), _AX) // LEAQ (RP)(RL), AX - self.Emit("ADDQ" , jit.Ptr(_SP_p, 8), _RL) // ADDQ 8(SP.p), RL - self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP) - self.Emit("MOVOU", jit.Ptr(_SP_p, 0), _X0) // MOVOU (SP.p), X0 - self.Emit("MOVOU", _X0, jit.Ptr(_SP, 8)) // MOVOU X0, 8(SP) - self.call_go(_F_memmove) // CALL_GO memmove - self.Sjmp("JMP" , "_done_{n}") // JMP _done_{n} - self.Link("_empty_{n}") // _empty_{n}: - self.check_size(1) // SIZE $1 - self.add_char('0') // CHAR $'0' - self.Link("_done_{n}") // _done_{n}: -} - -func (self *_Assembler) _asm_OP_eface(_ *_Instr) { - self.prep_buffer() // MOVE {buf}, (SP)s - self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX) // MOVQ (SP.p), AX - self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 8)) // MOVQ AX, 8(SP) - self.Emit("LEAQ" , jit.Ptr(_SP_p, 8), _AX) // LEAQ 8(SP.p), AX - self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 16)) // MOVQ AX, 16(SP) - self.Emit("MOVQ" , _ST, jit.Ptr(_SP, 24)) // MOVQ ST, 24(SP) - self.Emit("MOVQ" , _ARG_fv, _AX) // MOVQ fv, AX - self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 32)) // MOVQ AX, 32(SP) - self.call_encoder(_F_encodeTypedPointer) // CALL encodeTypedPointer - self.Emit("MOVQ" , jit.Ptr(_SP, 40), _ET) // MOVQ 40(SP), ET - self.Emit("MOVQ" , jit.Ptr(_SP, 48), _EP) // MOVQ 48(SP), EP - self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET - self.Sjmp("JNZ" , _LB_error) // JNZ _error -} - -func (self *_Assembler) _asm_OP_iface(_ *_Instr) { - self.prep_buffer() // MOVE {buf}, (SP) - self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX) // MOVQ (SP.p), AX - self.Emit("MOVQ" , jit.Ptr(_AX, 8), _AX) // MOVQ 8(AX), AX - self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 8)) // MOVQ AX, 8(SP) - self.Emit("LEAQ" , jit.Ptr(_SP_p, 8), _AX) // LEAQ 8(SP.p), AX - self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 16)) // MOVQ AX, 16(SP) - self.Emit("MOVQ" , _ST, jit.Ptr(_SP, 24)) // MOVQ ST, 24(SP) - self.Emit("MOVQ" , _ARG_fv, _AX) // MOVQ fv, AX - self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 32)) // MOVQ AX, 32(SP) - self.call_encoder(_F_encodeTypedPointer) // CALL encodeTypedPointer - self.Emit("MOVQ" , jit.Ptr(_SP, 40), _ET) // MOVQ 40(SP), ET - self.Emit("MOVQ" , jit.Ptr(_SP, 48), _EP) // MOVQ 48(SP), EP - self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET - self.Sjmp("JNZ" , _LB_error) // JNZ _error -} - -func (self *_Assembler) _asm_OP_byte(p *_Instr) { - self.check_size(1) - self.Emit("MOVB", jit.Imm(p.i64()), jit.Sib(_RP, _RL, 1, 0)) // MOVL p.vi(), (RP)(RL*1) - self.Emit("ADDQ", jit.Imm(1), _RL) // ADDQ $1, RL 
-} - -func (self *_Assembler) _asm_OP_text(p *_Instr) { - self.check_size(len(p.vs())) // SIZE ${len(p.vs())} - self.add_text(p.vs()) // TEXT ${p.vs()} -} - -func (self *_Assembler) _asm_OP_deref(_ *_Instr) { - self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _SP_p) // MOVQ (SP.p), SP.p -} - -func (self *_Assembler) _asm_OP_index(p *_Instr) { - self.Emit("MOVQ", jit.Imm(p.i64()), _AX) // MOVQ $p.vi(), AX - self.Emit("ADDQ", _AX, _SP_p) // ADDQ AX, SP.p -} - -func (self *_Assembler) _asm_OP_load(_ *_Instr) { - self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX) // MOVQ (ST), AX - self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, -24), _SP_x) // MOVQ -24(ST)(AX), SP.x - self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, -8), _SP_p) // MOVQ -8(ST)(AX), SP.p - self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 0), _SP_q) // MOVQ (ST)(AX), SP.q -} - -func (self *_Assembler) _asm_OP_save(_ *_Instr) { - self.save_state() -} - -func (self *_Assembler) _asm_OP_drop(_ *_Instr) { - self.drop_state(_StateSize) -} - -func (self *_Assembler) _asm_OP_drop_2(_ *_Instr) { - self.drop_state(_StateSize * 2) // DROP $(_StateSize * 2) - self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 56)) // MOVOU X0, 56(ST)(AX) -} - -func (self *_Assembler) _asm_OP_recurse(p *_Instr) { - self.prep_buffer() // MOVE {buf}, (SP) - vt, pv := p.vp() - self.Emit("MOVQ", jit.Type(vt), _AX) // MOVQ $(type(p.vt())), AX - self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8)) // MOVQ AX, 8(SP) - - /* check for indirection */ - if !rt.UnpackType(vt).Indirect() { - self.Emit("MOVQ", _SP_p, _AX) // MOVQ SP.p, AX - } else { - self.Emit("MOVQ", _SP_p, _VAR_vp) // MOVQ SP.p, 48(SP) - self.Emit("LEAQ", _VAR_vp, _AX) // LEAQ 48(SP), AX - } - - /* call the encoder */ - self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 16)) // MOVQ AX, 16(SP) - self.Emit("MOVQ" , _ST, jit.Ptr(_SP, 24)) // MOVQ ST, 24(SP) - self.Emit("MOVQ" , _ARG_fv, _AX) // MOVQ fv, AX - if pv { - self.Emit("BTCQ", jit.Imm(bitPointerValue), _AX) // BTCQ $1, AX - } - self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 32)) // MOVQ AX, 32(SP) - self.call_encoder(_F_encodeTypedPointer) // CALL encodeTypedPointer - self.Emit("MOVQ" , jit.Ptr(_SP, 40), _ET) // MOVQ 40(SP), ET - self.Emit("MOVQ" , jit.Ptr(_SP, 48), _EP) // MOVQ 48(SP), EP - self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET - self.Sjmp("JNZ" , _LB_error) // JNZ _error -} - -func (self *_Assembler) _asm_OP_is_nil(p *_Instr) { - self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPQ (SP.p), $0 - self.Xjmp("JE" , p.vi()) // JE p.vi() -} - -func (self *_Assembler) _asm_OP_is_nil_p1(p *_Instr) { - self.Emit("CMPQ", jit.Ptr(_SP_p, 8), jit.Imm(0)) // CMPQ 8(SP.p), $0 - self.Xjmp("JE" , p.vi()) // JE p.vi() -} - -func (self *_Assembler) _asm_OP_is_zero_1(p *_Instr) { - self.Emit("CMPB", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPB (SP.p), $0 - self.Xjmp("JE" , p.vi()) // JE p.vi() -} - -func (self *_Assembler) _asm_OP_is_zero_2(p *_Instr) { - self.Emit("CMPW", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPW (SP.p), $0 - self.Xjmp("JE" , p.vi()) // JE p.vi() -} - -func (self *_Assembler) _asm_OP_is_zero_4(p *_Instr) { - self.Emit("CMPL", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPL (SP.p), $0 - self.Xjmp("JE" , p.vi()) // JE p.vi() -} - -func (self *_Assembler) _asm_OP_is_zero_8(p *_Instr) { - self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPQ (SP.p), $0 - self.Xjmp("JE" , p.vi()) // JE p.vi() -} - -func (self *_Assembler) _asm_OP_is_zero_map(p *_Instr) { - self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX) // MOVQ (SP.p), AX - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Xjmp("JZ" , p.vi()) // JZ p.vi() - self.Emit("CMPQ" , jit.Ptr(_AX, 0), 
jit.Imm(0)) // CMPQ (AX), $0 - self.Xjmp("JE" , p.vi()) // JE p.vi() -} - -func (self *_Assembler) _asm_OP_goto(p *_Instr) { - self.Xjmp("JMP", p.vi()) -} - -func (self *_Assembler) _asm_OP_map_iter(p *_Instr) { - self.Emit("MOVQ" , jit.Type(p.vt()), _AX) // MOVQ $p.vt(), AX - self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _CX) // MOVQ (SP.p), CX - self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP) - self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 8)) // MOVQ CX, 8(SP) - self.Emit("MOVQ" , _ARG_fv, _AX) // MOVQ fv, AX - self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 16)) // MOVQ AX, 16(SP) - self.call_go(_F_iteratorStart) // CALL_GO iteratorStart - self.Emit("MOVQ" , jit.Ptr(_SP, 24), _SP_q) // MOVQ 24(SP), SP.q - self.Emit("MOVQ" , jit.Ptr(_SP, 32), _ET) // MOVQ 32(SP), ET - self.Emit("MOVQ" , jit.Ptr(_SP, 40), _EP) // MOVQ 40(SP), EP - self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET - self.Sjmp("JNZ" , _LB_error) // JNZ _error -} - -func (self *_Assembler) _asm_OP_map_stop(_ *_Instr) { - self.Emit("MOVQ", _SP_q, jit.Ptr(_SP, 0)) // MOVQ SP.q, 0(SP) - self.call_go(_F_iteratorStop) // CALL_GO iteratorStop - self.Emit("XORL", _SP_q, _SP_q) // XORL SP.q, SP.q -} - -func (self *_Assembler) _asm_OP_map_check_key(p *_Instr) { - self.Emit("MOVQ" , jit.Ptr(_SP_q, 0), _SP_p) // MOVQ (SP.q), SP.p - self.Emit("TESTQ", _SP_p, _SP_p) // TESTQ SP.p, SP.p - self.Xjmp("JZ" , p.vi()) // JNZ p.vi() -} - -func (self *_Assembler) _asm_OP_map_write_key(p *_Instr) { - self.Emit("BTQ", jit.Imm(bitSortMapKeys), _ARG_fv) // BTQ ${SortMapKeys}, fv - self.Sjmp("JNC", "_unordered_key_{n}") // JNC _unordered_key_{n} - self.encode_string(false) // STR $false - self.Xjmp("JMP", p.vi()) // JMP ${p.vi()} - self.Link("_unordered_key_{n}") // _unordered_key_{n}: -} - -func (self *_Assembler) _asm_OP_map_value_next(_ *_Instr) { - self.Emit("MOVQ", jit.Ptr(_SP_q, 8), _SP_p) // MOVQ 8(SP.q), SP.p - self.Emit("MOVQ", _SP_q, jit.Ptr(_SP, 0)) // MOVQ SP.q, (SP) - self.call_go(_F_iteratorNext) // CALL_GO iteratorNext -} - -func (self *_Assembler) _asm_OP_slice_len(_ *_Instr) { - self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _SP_x) // MOVQ 8(SP.p), SP.x - self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _SP_p) // MOVQ (SP.p), SP.p - self.Emit("ORQ" , jit.Imm(1 << _S_init), _SP_f) // ORQ $(1<<_S_init), SP.f -} - -func (self *_Assembler) _asm_OP_slice_next(p *_Instr) { - self.Emit("TESTQ" , _SP_x, _SP_x) // TESTQ SP.x, SP.x - self.Xjmp("JZ" , p.vi()) // JZ p.vi() - self.Emit("SUBQ" , jit.Imm(1), _SP_x) // SUBQ $1, SP.x - self.Emit("BTRQ" , jit.Imm(_S_init), _SP_f) // BTRQ $_S_init, SP.f - self.Emit("LEAQ" , jit.Ptr(_SP_p, int64(p.vlen())), _AX) // LEAQ $(p.vlen())(SP.p), AX - self.Emit("CMOVQCC", _AX, _SP_p) // CMOVQNC AX, SP.p -} - -func (self *_Assembler) _asm_OP_marshal(p *_Instr) { - self.call_marshaler(_F_encodeJsonMarshaler, _T_json_Marshaler, p.vt()) -} - -func (self *_Assembler) _asm_OP_marshal_p(p *_Instr) { - if p.vk() != reflect.Ptr { - panic("marshal_p: invalid type") - } else { - self.call_marshaler_v(_F_encodeJsonMarshaler, _T_json_Marshaler, p.vt(), false) - } -} - -func (self *_Assembler) _asm_OP_marshal_text(p *_Instr) { - self.call_marshaler(_F_encodeTextMarshaler, _T_encoding_TextMarshaler, p.vt()) -} - -func (self *_Assembler) _asm_OP_marshal_text_p(p *_Instr) { - if p.vk() != reflect.Ptr { - panic("marshal_text_p: invalid type") - } else { - self.call_marshaler_v(_F_encodeTextMarshaler, _T_encoding_TextMarshaler, p.vt(), false) - } -} - -func (self *_Assembler) _asm_OP_cond_set(_ *_Instr) { - self.Emit("ORQ", jit.Imm(1 << _S_cond), _SP_f) // ORQ 
$(1<<_S_cond), SP.f -} - -func (self *_Assembler) _asm_OP_cond_testc(p *_Instr) { - self.Emit("BTRQ", jit.Imm(_S_cond), _SP_f) // BTRQ $_S_cond, SP.f - self.Xjmp("JC" , p.vi()) -} - -func (self *_Assembler) print_gc(i int, p1 *_Instr, p2 *_Instr) { - self.Emit("MOVQ", jit.Imm(int64(p2.op())), jit.Ptr(_SP, 16))// MOVQ $(p2.op()), 16(SP) - self.Emit("MOVQ", jit.Imm(int64(p1.op())), jit.Ptr(_SP, 8)) // MOVQ $(p1.op()), 8(SP) - self.Emit("MOVQ", jit.Imm(int64(i)), jit.Ptr(_SP, 0)) // MOVQ $(i), (SP) - self.call_go(_F_println) -} - -var ( - _V_writeBarrier = jit.Imm(int64(_runtime_writeBarrier)) - - _F_gcWriteBarrierAX = jit.Func(gcWriteBarrierAX) -) - -func (self *_Assembler) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr) { - if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX { - panic("rec contains AX!") - } - self.Emit("MOVQ", _V_writeBarrier, _R10) - self.Emit("CMPL", jit.Ptr(_R10, 0), jit.Imm(0)) - self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}") - self.Emit("MOVQ", ptr, _AX) - self.xsave(_DI) - self.Emit("LEAQ", rec, _DI) - self.Emit("MOVQ", _F_gcWriteBarrierAX, _R10) // MOVQ ${fn}, AX - self.Rjmp("CALL", _R10) - self.xload(_DI) - self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}") - self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}") - self.Emit("MOVQ", ptr, rec) - self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}") -} \ No newline at end of file diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/assembler_amd64_go117.go b/vendor/github.com/bytedance/sonic/internal/encoder/assembler_amd64_go117.go deleted file mode 100644 index 1f1b28073e0ba1513533042cb23b1b44930729ef..0000000000000000000000000000000000000000 --- a/vendor/github.com/bytedance/sonic/internal/encoder/assembler_amd64_go117.go +++ /dev/null @@ -1,1202 +0,0 @@ -//go:build go1.17 && !go1.21 -// +build go1.17,!go1.21 - -/* - * Copyright 2021 ByteDance Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */
-package encoder
-
-import (
-    `fmt`
-    `reflect`
-    `strconv`
-    `unsafe`
-
-    `github.com/bytedance/sonic/internal/cpu`
-    `github.com/bytedance/sonic/internal/jit`
-    `github.com/bytedance/sonic/internal/native/types`
-    `github.com/twitchyliquid64/golang-asm/obj`
-    `github.com/twitchyliquid64/golang-asm/obj/x86`
-
-    `github.com/bytedance/sonic/internal/native`
-    `github.com/bytedance/sonic/internal/rt`
-)
-
-/** Register Allocations
- *
- * State Registers:
- *
- *     %rbx : stack base
- *     %rdi : result pointer
- *     %rsi : result length
- *     %rdx : result capacity
- *     %r12 : sp->p
- *     %r13 : sp->q
- *     %r14 : sp->x
- *     %r15 : sp->f
- *
- * Error Registers:
- *
- *     %r10 : error type register
- *     %r11 : error pointer register
- */
-
-/** Function Prototype & Stack Map
- *
- * func (buf *[]byte, p unsafe.Pointer, sb *_Stack, fv uint64) (err error)
- *
- * buf    :   (FP)
- * p      :  8(FP)
- * sb     : 16(FP)
- * fv     : 24(FP)
- * err.vt : 32(FP)
- * err.vp : 40(FP)
- */
-
-const (
-    _S_cond = iota
-    _S_init
-)
-
-const (
-    _FP_args   = 32 // 32 bytes for spill registers of arguments
-    _FP_fargs  = 40 // 40 bytes for passing arguments to other Go functions
-    _FP_saves  = 64 // 64 bytes for saving the registers before CALL instructions
-    _FP_locals = 24 // 24 bytes for local variables
-)
-
-const (
-    _FP_loffs = _FP_fargs + _FP_saves
-    _FP_offs  = _FP_loffs + _FP_locals
-    // _FP_offs = _FP_loffs + _FP_locals + _FP_debug
-    _FP_size = _FP_offs + 8 // 8 bytes for the parent frame pointer
-    _FP_base = _FP_size + 8 // 8 bytes for the return address
-)
-
-const (
-    _FM_exp32 = 0x7f800000
-    _FM_exp64 = 0x7ff0000000000000
-)
-
-const (
-    _IM_null   = 0x6c6c756e // 'null'
-    _IM_true   = 0x65757274 // 'true'
-    _IM_fals   = 0x736c6166 // 'fals' ('false' without the 'e')
-    _IM_open   = 0x00225c22 // '"\"∅'
-    _IM_array  = 0x5d5b     // '[]'
-    _IM_object = 0x7d7b     // '{}'
-    _IM_mulv   = -0x5555555555555555
-)
-
-const (
-    _LB_more_space        = "_more_space"
-    _LB_more_space_return = "_more_space_return_"
-)
-
-const (
-    _LB_error                 = "_error"
-    _LB_error_too_deep        = "_error_too_deep"
-    _LB_error_invalid_number  = "_error_invalid_number"
-    _LB_error_nan_or_infinite = "_error_nan_or_infinite"
-    _LB_panic                 = "_panic"
-)
-
-var (
-    _AX = jit.Reg("AX")
-    _BX = jit.Reg("BX")
-    _CX = jit.Reg("CX")
-    _DX = jit.Reg("DX")
-    _DI = jit.Reg("DI")
-    _SI = jit.Reg("SI")
-    _BP = jit.Reg("BP")
-    _SP = jit.Reg("SP")
-    _R8 = jit.Reg("R8")
-    _R9 = jit.Reg("R9")
-)
-
-var (
-    _X0 = jit.Reg("X0")
-    _Y0 = jit.Reg("Y0")
-)
-
-var (
-    _ST = jit.Reg("R15") // can't use R14 since it's always scratched by Go...
- _RP = jit.Reg("DI") - _RL = jit.Reg("SI") - _RC = jit.Reg("DX") -) - -var ( - _LR = jit.Reg("R9") - _ET = jit.Reg("AX") - _EP = jit.Reg("BX") -) - -var ( - _SP_p = jit.Reg("R10") // saved on BX when call_c - _SP_q = jit.Reg("R11") // saved on BP when call_c - _SP_x = jit.Reg("R12") - _SP_f = jit.Reg("R13") -) - -var ( - _ARG_rb = jit.Ptr(_SP, _FP_base) - _ARG_vp = jit.Ptr(_SP, _FP_base + 8) - _ARG_sb = jit.Ptr(_SP, _FP_base + 16) - _ARG_fv = jit.Ptr(_SP, _FP_base + 24) -) - -var ( - _RET_et = _ET - _RET_ep = _EP -) - -var ( - _VAR_sp = jit.Ptr(_SP, _FP_fargs + _FP_saves) - _VAR_dn = jit.Ptr(_SP, _FP_fargs + _FP_saves + 8) - _VAR_vp = jit.Ptr(_SP, _FP_fargs + _FP_saves + 16) -) - -var ( - _REG_ffi = []obj.Addr{ _RP, _RL, _RC} - _REG_b64 = []obj.Addr{_SP_p, _SP_q} - - _REG_all = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _RP, _RL, _RC} - _REG_ms = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _LR} - _REG_enc = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _RL} -) - -type _Assembler struct { - jit.BaseAssembler - p _Program - x int - name string -} - -func newAssembler(p _Program) *_Assembler { - return new(_Assembler).Init(p) -} - -/** Assembler Interface **/ - -func (self *_Assembler) Load() _Encoder { - return ptoenc(self.BaseAssembler.Load("encode_"+self.name, _FP_size, _FP_args, argPtrs, localPtrs)) -} - -func (self *_Assembler) Init(p _Program) *_Assembler { - self.p = p - self.BaseAssembler.Init(self.compile) - return self -} - -func (self *_Assembler) compile() { - self.prologue() - self.instrs() - self.epilogue() - self.builtins() -} - -/** Assembler Stages **/ - -var _OpFuncTab = [256]func(*_Assembler, *_Instr) { - _OP_null : (*_Assembler)._asm_OP_null, - _OP_empty_arr : (*_Assembler)._asm_OP_empty_arr, - _OP_empty_obj : (*_Assembler)._asm_OP_empty_obj, - _OP_bool : (*_Assembler)._asm_OP_bool, - _OP_i8 : (*_Assembler)._asm_OP_i8, - _OP_i16 : (*_Assembler)._asm_OP_i16, - _OP_i32 : (*_Assembler)._asm_OP_i32, - _OP_i64 : (*_Assembler)._asm_OP_i64, - _OP_u8 : (*_Assembler)._asm_OP_u8, - _OP_u16 : (*_Assembler)._asm_OP_u16, - _OP_u32 : (*_Assembler)._asm_OP_u32, - _OP_u64 : (*_Assembler)._asm_OP_u64, - _OP_f32 : (*_Assembler)._asm_OP_f32, - _OP_f64 : (*_Assembler)._asm_OP_f64, - _OP_str : (*_Assembler)._asm_OP_str, - _OP_bin : (*_Assembler)._asm_OP_bin, - _OP_quote : (*_Assembler)._asm_OP_quote, - _OP_number : (*_Assembler)._asm_OP_number, - _OP_eface : (*_Assembler)._asm_OP_eface, - _OP_iface : (*_Assembler)._asm_OP_iface, - _OP_byte : (*_Assembler)._asm_OP_byte, - _OP_text : (*_Assembler)._asm_OP_text, - _OP_deref : (*_Assembler)._asm_OP_deref, - _OP_index : (*_Assembler)._asm_OP_index, - _OP_load : (*_Assembler)._asm_OP_load, - _OP_save : (*_Assembler)._asm_OP_save, - _OP_drop : (*_Assembler)._asm_OP_drop, - _OP_drop_2 : (*_Assembler)._asm_OP_drop_2, - _OP_recurse : (*_Assembler)._asm_OP_recurse, - _OP_is_nil : (*_Assembler)._asm_OP_is_nil, - _OP_is_nil_p1 : (*_Assembler)._asm_OP_is_nil_p1, - _OP_is_zero_1 : (*_Assembler)._asm_OP_is_zero_1, - _OP_is_zero_2 : (*_Assembler)._asm_OP_is_zero_2, - _OP_is_zero_4 : (*_Assembler)._asm_OP_is_zero_4, - _OP_is_zero_8 : (*_Assembler)._asm_OP_is_zero_8, - _OP_is_zero_map : (*_Assembler)._asm_OP_is_zero_map, - _OP_goto : (*_Assembler)._asm_OP_goto, - _OP_map_iter : (*_Assembler)._asm_OP_map_iter, - _OP_map_stop : (*_Assembler)._asm_OP_map_stop, - _OP_map_check_key : (*_Assembler)._asm_OP_map_check_key, - _OP_map_write_key : (*_Assembler)._asm_OP_map_write_key, - _OP_map_value_next : (*_Assembler)._asm_OP_map_value_next, - _OP_slice_len : 
(*_Assembler)._asm_OP_slice_len, - _OP_slice_next : (*_Assembler)._asm_OP_slice_next, - _OP_marshal : (*_Assembler)._asm_OP_marshal, - _OP_marshal_p : (*_Assembler)._asm_OP_marshal_p, - _OP_marshal_text : (*_Assembler)._asm_OP_marshal_text, - _OP_marshal_text_p : (*_Assembler)._asm_OP_marshal_text_p, - _OP_cond_set : (*_Assembler)._asm_OP_cond_set, - _OP_cond_testc : (*_Assembler)._asm_OP_cond_testc, -} - -func (self *_Assembler) instr(v *_Instr) { - if fn := _OpFuncTab[v.op()]; fn != nil { - fn(self, v) - } else { - panic(fmt.Sprintf("invalid opcode: %d", v.op())) - } -} - -func (self *_Assembler) instrs() { - for i, v := range self.p { - self.Mark(i) - self.instr(&v) - self.debug_instr(i, &v) - } -} - -func (self *_Assembler) builtins() { - self.more_space() - self.error_too_deep() - self.error_invalid_number() - self.error_nan_or_infinite() - self.go_panic() -} - -func (self *_Assembler) epilogue() { - self.Mark(len(self.p)) - self.Emit("XORL", _ET, _ET) - self.Emit("XORL", _EP, _EP) - self.Link(_LB_error) - self.Emit("MOVQ", _ARG_rb, _CX) // MOVQ rb<>+0(FP), CX - self.Emit("MOVQ", _RL, jit.Ptr(_CX, 8)) // MOVQ RL, 8(CX) - self.Emit("MOVQ", jit.Imm(0), _ARG_rb) // MOVQ AX, rb<>+0(FP) - self.Emit("MOVQ", jit.Imm(0), _ARG_vp) // MOVQ BX, vp<>+8(FP) - self.Emit("MOVQ", jit.Imm(0), _ARG_sb) // MOVQ CX, sb<>+16(FP) - self.Emit("MOVQ", jit.Ptr(_SP, _FP_offs), _BP) // MOVQ _FP_offs(SP), BP - self.Emit("ADDQ", jit.Imm(_FP_size), _SP) // ADDQ $_FP_size, SP - self.Emit("RET") // RET -} - -func (self *_Assembler) prologue() { - self.Emit("SUBQ", jit.Imm(_FP_size), _SP) // SUBQ $_FP_size, SP - self.Emit("MOVQ", _BP, jit.Ptr(_SP, _FP_offs)) // MOVQ BP, _FP_offs(SP) - self.Emit("LEAQ", jit.Ptr(_SP, _FP_offs), _BP) // LEAQ _FP_offs(SP), BP - self.Emit("MOVQ", _AX, _ARG_rb) // MOVQ AX, rb<>+0(FP) - self.Emit("MOVQ", _BX, _ARG_vp) // MOVQ BX, vp<>+8(FP) - self.Emit("MOVQ", _CX, _ARG_sb) // MOVQ CX, sb<>+16(FP) - self.Emit("MOVQ", _DI, _ARG_fv) // MOVQ DI, rb<>+24(FP) - self.Emit("MOVQ", jit.Ptr(_AX, 0), _RP) // MOVQ (AX) , DI - self.Emit("MOVQ", jit.Ptr(_AX, 8), _RL) // MOVQ 8(AX) , SI - self.Emit("MOVQ", jit.Ptr(_AX, 16), _RC) // MOVQ 16(AX), DX - self.Emit("MOVQ", _BX, _SP_p) // MOVQ BX, R10 - self.Emit("MOVQ", _CX, _ST) // MOVQ CX, R8 - self.Emit("XORL", _SP_x, _SP_x) // XORL R10, R12 - self.Emit("XORL", _SP_f, _SP_f) // XORL R11, R13 - self.Emit("XORL", _SP_q, _SP_q) // XORL R13, R11 -} - -/** Assembler Inline Functions **/ - -func (self *_Assembler) xsave(reg ...obj.Addr) { - for i, v := range reg { - if i > _FP_saves / 8 - 1 { - panic("too many registers to save") - } else { - self.Emit("MOVQ", v, jit.Ptr(_SP, _FP_fargs + int64(i) * 8)) - } - } -} - -func (self *_Assembler) xload(reg ...obj.Addr) { - for i, v := range reg { - if i > _FP_saves / 8 - 1 { - panic("too many registers to load") - } else { - self.Emit("MOVQ", jit.Ptr(_SP, _FP_fargs + int64(i) * 8), v) - } - } -} - -func (self *_Assembler) rbuf_di() { - if _RP.Reg != x86.REG_DI { - panic("register allocation messed up: RP != DI") - } else { - self.Emit("ADDQ", _RL, _RP) - } -} - -func (self *_Assembler) store_int(nd int, fn obj.Addr, ins string) { - self.check_size(nd) - self.save_c() // SAVE $C_regs - self.rbuf_di() // MOVQ RP, DI - self.Emit(ins, jit.Ptr(_SP_p, 0), _SI) // $ins (SP.p), SI - self.call_c(fn) // CALL_C $fn - self.Emit("ADDQ", _AX, _RL) // ADDQ AX, RL -} - -func (self *_Assembler) store_str(s string) { - i := 0 - m := rt.Str2Mem(s) - - /* 8-byte stores */ - for i <= len(m) - 8 { - self.Emit("MOVQ", 
jit.Imm(rt.Get64(m[i:])), _AX) // MOVQ $s[i:], AX - self.Emit("MOVQ", _AX, jit.Sib(_RP, _RL, 1, int64(i))) // MOVQ AX, i(RP)(RL) - i += 8 - } - - /* 4-byte stores */ - if i <= len(m) - 4 { - self.Emit("MOVL", jit.Imm(int64(rt.Get32(m[i:]))), jit.Sib(_RP, _RL, 1, int64(i))) // MOVL $s[i:], i(RP)(RL) - i += 4 - } - - /* 2-byte stores */ - if i <= len(m) - 2 { - self.Emit("MOVW", jit.Imm(int64(rt.Get16(m[i:]))), jit.Sib(_RP, _RL, 1, int64(i))) // MOVW $s[i:], i(RP)(RL) - i += 2 - } - - /* last byte */ - if i < len(m) { - self.Emit("MOVB", jit.Imm(int64(m[i])), jit.Sib(_RP, _RL, 1, int64(i))) // MOVB $s[i:], i(RP)(RL) - } -} - -func (self *_Assembler) check_size(n int) { - self.check_size_rl(jit.Ptr(_RL, int64(n))) -} - -func (self *_Assembler) check_size_r(r obj.Addr, d int) { - self.check_size_rl(jit.Sib(_RL, r, 1, int64(d))) -} - -func (self *_Assembler) check_size_rl(v obj.Addr) { - idx := self.x - key := _LB_more_space_return + strconv.Itoa(idx) - - /* the following code relies on LR == R9 to work */ - if _LR.Reg != x86.REG_R9 { - panic("register allocation messed up: LR != R9") - } - - /* check for buffer capacity */ - self.x++ - self.Emit("LEAQ", v, _AX) // LEAQ $v, AX - self.Emit("CMPQ", _AX, _RC) // CMPQ AX, RC - self.Sjmp("JBE" , key) // JBE _more_space_return_{n} - self.slice_grow_ax(key) // GROW $key - self.Link(key) // _more_space_return_{n}: -} - -func (self *_Assembler) slice_grow_ax(ret string) { - self.Byte(0x4c, 0x8d, 0x0d) // LEAQ ?(PC), R9 - self.Sref(ret, 4) // .... &ret - self.Sjmp("JMP" , _LB_more_space) // JMP _more_space -} - -/** State Stack Helpers **/ - -const ( - _StateSize = int64(unsafe.Sizeof(_State{})) - _StackLimit = _MaxStack * _StateSize -) - -func (self *_Assembler) save_state() { - self.Emit("MOVQ", jit.Ptr(_ST, 0), _CX) // MOVQ (ST), CX - self.Emit("LEAQ", jit.Ptr(_CX, _StateSize), _R9) // LEAQ _StateSize(CX), R9 - self.Emit("CMPQ", _R9, jit.Imm(_StackLimit)) // CMPQ R9, $_StackLimit - self.Sjmp("JAE" , _LB_error_too_deep) // JA _error_too_deep - self.Emit("MOVQ", _SP_x, jit.Sib(_ST, _CX, 1, 8)) // MOVQ SP.x, 8(ST)(CX) - self.Emit("MOVQ", _SP_f, jit.Sib(_ST, _CX, 1, 16)) // MOVQ SP.f, 16(ST)(CX) - self.WriteRecNotAX(0, _SP_p, jit.Sib(_ST, _CX, 1, 24)) // MOVQ SP.p, 24(ST)(CX) - self.WriteRecNotAX(1, _SP_q, jit.Sib(_ST, _CX, 1, 32)) // MOVQ SP.q, 32(ST)(CX) - self.Emit("MOVQ", _R9, jit.Ptr(_ST, 0)) // MOVQ R9, (ST) -} - -func (self *_Assembler) drop_state(decr int64) { - self.Emit("MOVQ" , jit.Ptr(_ST, 0), _AX) // MOVQ (ST), AX - self.Emit("SUBQ" , jit.Imm(decr), _AX) // SUBQ $decr, AX - self.Emit("MOVQ" , _AX, jit.Ptr(_ST, 0)) // MOVQ AX, (ST) - self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 8), _SP_x) // MOVQ 8(ST)(AX), SP.x - self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 16), _SP_f) // MOVQ 16(ST)(AX), SP.f - self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 24), _SP_p) // MOVQ 24(ST)(AX), SP.p - self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 32), _SP_q) // MOVQ 32(ST)(AX), SP.q - self.Emit("PXOR" , _X0, _X0) // PXOR X0, X0 - self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 8)) // MOVOU X0, 8(ST)(AX) - self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 24)) // MOVOU X0, 24(ST)(AX) -} - -/** Buffer Helpers **/ - -func (self *_Assembler) add_char(ch byte) { - self.Emit("MOVB", jit.Imm(int64(ch)), jit.Sib(_RP, _RL, 1, 0)) // MOVB $ch, (RP)(RL) - self.Emit("ADDQ", jit.Imm(1), _RL) // ADDQ $1, RL -} - -func (self *_Assembler) add_long(ch uint32, n int64) { - self.Emit("MOVL", jit.Imm(int64(ch)), jit.Sib(_RP, _RL, 1, 0)) // MOVL $ch, (RP)(RL) - self.Emit("ADDQ", jit.Imm(n), _RL) // ADDQ $n, RL -} - 
-func (self *_Assembler) add_text(ss string) { - self.store_str(ss) // TEXT $ss - self.Emit("ADDQ", jit.Imm(int64(len(ss))), _RL) // ADDQ ${len(ss)}, RL -} - -// get *buf at AX -func (self *_Assembler) prep_buffer_AX() { - self.Emit("MOVQ", _ARG_rb, _AX) // MOVQ rb<>+0(FP), AX - self.Emit("MOVQ", _RL, jit.Ptr(_AX, 8)) // MOVQ RL, 8(AX) -} - -func (self *_Assembler) save_buffer() { - self.Emit("MOVQ", _ARG_rb, _CX) // MOVQ rb<>+0(FP), CX - self.Emit("MOVQ", _RP, jit.Ptr(_CX, 0)) // MOVQ RP, (CX) - self.Emit("MOVQ", _RL, jit.Ptr(_CX, 8)) // MOVQ RL, 8(CX) - self.Emit("MOVQ", _RC, jit.Ptr(_CX, 16)) // MOVQ RC, 16(CX) -} - -// get *buf at AX -func (self *_Assembler) load_buffer_AX() { - self.Emit("MOVQ", _ARG_rb, _AX) // MOVQ rb<>+0(FP), AX - self.Emit("MOVQ", jit.Ptr(_AX, 0), _RP) // MOVQ (AX), RP - self.Emit("MOVQ", jit.Ptr(_AX, 8), _RL) // MOVQ 8(AX), RL - self.Emit("MOVQ", jit.Ptr(_AX, 16), _RC) // MOVQ 16(AX), RC -} - -/** Function Interface Helpers **/ - -func (self *_Assembler) call(pc obj.Addr) { - self.Emit("MOVQ", pc, _LR) // MOVQ $pc, AX - self.Rjmp("CALL", _LR) // CALL AX -} - -func (self *_Assembler) save_c() { - self.xsave(_REG_ffi...) // SAVE $REG_ffi -} - -func (self *_Assembler) call_b64(pc obj.Addr) { - self.xsave(_REG_b64...) // SAVE $REG_all - self.call(pc) // CALL $pc - self.xload(_REG_b64...) // LOAD $REG_ffi -} - -func (self *_Assembler) call_c(pc obj.Addr) { - self.Emit("XCHGQ", _SP_p, _BX) - self.Emit("XCHGQ", _SP_q, _BP) - self.call(pc) // CALL $pc - self.xload(_REG_ffi...) // LOAD $REG_ffi - self.Emit("XCHGQ", _SP_p, _BX) - self.Emit("XCHGQ", _SP_q, _BP) -} - -func (self *_Assembler) call_go(pc obj.Addr) { - self.xsave(_REG_all...) // SAVE $REG_all - self.call(pc) // CALL $pc - self.xload(_REG_all...) // LOAD $REG_all -} - -func (self *_Assembler) call_more_space(pc obj.Addr) { - self.xsave(_REG_ms...) // SAVE $REG_all - self.call(pc) // CALL $pc - self.xload(_REG_ms...) // LOAD $REG_all -} - -func (self *_Assembler) call_encoder(pc obj.Addr) { - self.xsave(_REG_enc...) // SAVE $REG_all - self.call(pc) // CALL $pc - self.xload(_REG_enc...) 
// LOAD $REG_all -} - -func (self *_Assembler) call_marshaler(fn obj.Addr, it *rt.GoType, vt reflect.Type) { - switch vt.Kind() { - case reflect.Interface : self.call_marshaler_i(fn, it) - case reflect.Ptr, reflect.Map : self.call_marshaler_v(fn, it, vt, true) - // struct/array of 1 direct iface type can be direct - default : self.call_marshaler_v(fn, it, vt, !rt.UnpackType(vt).Indirect()) - } -} - -func (self *_Assembler) call_marshaler_i(fn obj.Addr, it *rt.GoType) { - self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX) // MOVQ (SP.p), AX - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JZ" , "_null_{n}") // JZ _null_{n} - self.Emit("MOVQ" , _AX, _BX) // MOVQ AX, BX - self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _CX) // MOVQ 8(SP.p), CX - self.Emit("MOVQ" , jit.Gtype(it), _AX) // MOVQ $it, AX - self.call_go(_F_assertI2I) // CALL_GO assertI2I - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JZ" , "_null_{n}") // JZ _null_{n} - self.Emit("MOVQ", _BX, _CX) // MOVQ BX, CX - self.Emit("MOVQ", _AX, _BX) // MOVQ AX, BX - self.prep_buffer_AX() - self.Emit("MOVQ", _ARG_fv, _DI) // MOVQ ARG.fv, DI - self.call_go(fn) // CALL $fn - self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET - self.Sjmp("JNZ" , _LB_error) // JNZ _error - self.load_buffer_AX() - self.Sjmp("JMP" , "_done_{n}") // JMP _done_{n} - self.Link("_null_{n}") // _null_{n}: - self.check_size(4) // SIZE $4 - self.Emit("MOVL", jit.Imm(_IM_null), jit.Sib(_RP, _RL, 1, 0)) // MOVL $'null', (RP)(RL*1) - self.Emit("ADDQ", jit.Imm(4), _RL) // ADDQ $4, RL - self.Link("_done_{n}") // _done_{n}: -} - -func (self *_Assembler) call_marshaler_v(fn obj.Addr, it *rt.GoType, vt reflect.Type, deref bool) { - self.prep_buffer_AX() // MOVE {buf}, (SP) - self.Emit("MOVQ", jit.Itab(it, vt), _BX) // MOVQ $(itab(it, vt)), BX - - /* dereference the pointer if needed */ - if !deref { - self.Emit("MOVQ", _SP_p, _CX) // MOVQ SP.p, CX - } else { - self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _CX) // MOVQ 0(SP.p), CX - } - - /* call the encoder, and perform error checks */ - self.Emit("MOVQ", _ARG_fv, _DI) // MOVQ ARG.fv, DI - self.call_go(fn) // CALL $fn - self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET - self.Sjmp("JNZ" , _LB_error) // JNZ _error - self.load_buffer_AX() -} - -/** Builtin: _more_space **/ - -var ( - _T_byte = jit.Type(byteType) - _F_growslice = jit.Func(growslice) -) - -// AX must saving n -func (self *_Assembler) more_space() { - self.Link(_LB_more_space) - self.Emit("MOVQ", _RP, _BX) // MOVQ DI, BX - self.Emit("MOVQ", _RL, _CX) // MOVQ SI, CX - self.Emit("MOVQ", _RC, _DI) // MOVQ DX, DI - self.Emit("MOVQ", _AX, _SI) // MOVQ AX, SI - self.Emit("MOVQ", _T_byte, _AX) // MOVQ $_T_byte, AX - self.call_more_space(_F_growslice) // CALL $pc - self.Emit("MOVQ", _AX, _RP) // MOVQ AX, DI - self.Emit("MOVQ", _BX, _RL) // MOVQ BX, SI - self.Emit("MOVQ", _CX, _RC) // MOVQ CX, DX - self.save_buffer() // SAVE {buf} - self.Rjmp("JMP" , _LR) // JMP LR -} - -/** Builtin Errors **/ - -var ( - _V_ERR_too_deep = jit.Imm(int64(uintptr(unsafe.Pointer(_ERR_too_deep)))) - _V_ERR_nan_or_infinite = jit.Imm(int64(uintptr(unsafe.Pointer(_ERR_nan_or_infinite)))) - _I_json_UnsupportedValueError = jit.Itab(rt.UnpackType(errorType), jsonUnsupportedValueType) -) - -func (self *_Assembler) error_too_deep() { - self.Link(_LB_error_too_deep) - self.Emit("MOVQ", _V_ERR_too_deep, _EP) // MOVQ $_V_ERR_too_deep, EP - self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET) // MOVQ $_I_json_UnsupportedValuError, ET - self.Sjmp("JMP" , _LB_error) // JMP _error -} - -func (self *_Assembler) 
error_invalid_number() { - self.Link(_LB_error_invalid_number) - self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _AX) // MOVQ 0(SP), AX - self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _BX) // MOVQ 8(SP), BX - self.call_go(_F_error_number) // CALL_GO error_number - self.Sjmp("JMP" , _LB_error) // JMP _error -} - -func (self *_Assembler) error_nan_or_infinite() { - self.Link(_LB_error_nan_or_infinite) - self.Emit("MOVQ", _V_ERR_nan_or_infinite, _EP) // MOVQ $_V_ERR_nan_or_infinite, EP - self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET) // MOVQ $_I_json_UnsupportedValuError, ET - self.Sjmp("JMP" , _LB_error) // JMP _error -} - -/** String Encoding Routine **/ - -var ( - _F_quote = jit.Imm(int64(native.S_quote)) - _F_panic = jit.Func(goPanic) -) - -func (self *_Assembler) go_panic() { - self.Link(_LB_panic) - self.Emit("MOVQ", _SP_p, _BX) - self.call_go(_F_panic) -} - -func (self *_Assembler) encode_string(doubleQuote bool) { - self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _AX) // MOVQ 8(SP.p), AX - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JZ" , "_str_empty_{n}") // JZ _str_empty_{n} - self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0)) - self.Sjmp("JNE" , "_str_next_{n}") - self.Emit("MOVQ", jit.Imm(int64(panicNilPointerOfNonEmptyString)), _AX) - self.Sjmp("JMP", _LB_panic) - self.Link("_str_next_{n}") - - /* openning quote, check for double quote */ - if !doubleQuote { - self.check_size_r(_AX, 2) // SIZE $2 - self.add_char('"') // CHAR $'"' - } else { - self.check_size_r(_AX, 6) // SIZE $6 - self.add_long(_IM_open, 3) // TEXT $`"\"` - } - - /* quoting loop */ - self.Emit("XORL", _AX, _AX) // XORL AX, AX - self.Emit("MOVQ", _AX, _VAR_sp) // MOVQ AX, sp - self.Link("_str_loop_{n}") // _str_loop_{n}: - self.save_c() // SAVE $REG_ffi - - /* load the output buffer first, and then input buffer, - * because the parameter registers collide with RP / RL / RC */ - self.Emit("MOVQ", _RC, _CX) // MOVQ RC, CX - self.Emit("SUBQ", _RL, _CX) // SUBQ RL, CX - self.Emit("MOVQ", _CX, _VAR_dn) // MOVQ CX, dn - self.Emit("LEAQ", jit.Sib(_RP, _RL, 1, 0), _DX) // LEAQ (RP)(RL), DX - self.Emit("LEAQ", _VAR_dn, _CX) // LEAQ dn, CX - self.Emit("MOVQ", _VAR_sp, _AX) // MOVQ sp, AX - self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _DI) // MOVQ (SP.p), DI - self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _SI) // MOVQ 8(SP.p), SI - self.Emit("ADDQ", _AX, _DI) // ADDQ AX, DI - self.Emit("SUBQ", _AX, _SI) // SUBQ AX, SI - - /* set the flags based on `doubleQuote` */ - if !doubleQuote { - self.Emit("XORL", _R8, _R8) // XORL R8, R8 - } else { - self.Emit("MOVL", jit.Imm(types.F_DOUBLE_UNQUOTE), _R8) // MOVL ${types.F_DOUBLE_UNQUOTE}, R8 - } - - /* call the native quoter */ - self.call_c(_F_quote) // CALL quote - self.Emit("ADDQ" , _VAR_dn, _RL) // ADDQ dn, RL - - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JS" , "_str_space_{n}") // JS _str_space_{n} - - /* close the string, check for double quote */ - if !doubleQuote { - self.check_size(1) // SIZE $1 - self.add_char('"') // CHAR $'"' - self.Sjmp("JMP", "_str_end_{n}") // JMP _str_end_{n} - } else { - self.check_size(3) // SIZE $3 - self.add_text("\\\"\"") // TEXT $'\""' - self.Sjmp("JMP", "_str_end_{n}") // JMP _str_end_{n} - } - - /* not enough space to contain the quoted string */ - self.Link("_str_space_{n}") // _str_space_{n}: - self.Emit("NOTQ", _AX) // NOTQ AX - self.Emit("ADDQ", _AX, _VAR_sp) // ADDQ AX, sp - self.Emit("LEAQ", jit.Sib(_RC, _RC, 1, 0), _AX) // LEAQ (RC)(RC), AX - self.slice_grow_ax("_str_loop_{n}") // GROW _str_loop_{n} - - /* empty string, check for double quote */ - 
if !doubleQuote {
-        self.Link("_str_empty_{n}")   // _str_empty_{n}:
-        self.check_size(2)            // SIZE $2
-        self.add_text("\"\"")         // TEXT $'""'
-        self.Link("_str_end_{n}")     // _str_end_{n}:
-    } else {
-        self.Link("_str_empty_{n}")   // _str_empty_{n}:
-        self.check_size(6)            // SIZE $6
-        self.add_text("\"\\\"\\\"\"") // TEXT $'"\"\""'
-        self.Link("_str_end_{n}")     // _str_end_{n}:
-    }
-}
-
-/** OpCode Assembler Functions **/
-
-var (
-    _T_json_Marshaler         = rt.UnpackType(jsonMarshalerType)
-    _T_encoding_TextMarshaler = rt.UnpackType(encodingTextMarshalerType)
-)
-
-var (
-    _F_f64toa    = jit.Imm(int64(native.S_f64toa))
-    _F_f32toa    = jit.Imm(int64(native.S_f32toa))
-    _F_i64toa    = jit.Imm(int64(native.S_i64toa))
-    _F_u64toa    = jit.Imm(int64(native.S_u64toa))
-    _F_b64encode = jit.Imm(int64(_subr__b64encode))
-)
-
-var (
-    _F_memmove       = jit.Func(memmove)
-    _F_error_number  = jit.Func(error_number)
-    _F_isValidNumber = jit.Func(isValidNumber)
-)
-
-var (
-    _F_iteratorStop  = jit.Func(iteratorStop)
-    _F_iteratorNext  = jit.Func(iteratorNext)
-    _F_iteratorStart = jit.Func(iteratorStart)
-)
-
-var (
-    _F_encodeTypedPointer  obj.Addr
-    _F_encodeJsonMarshaler obj.Addr
-    _F_encodeTextMarshaler obj.Addr
-)
-
-const (
-    _MODE_AVX2 = 1 << 2
-)
-
-func init() {
-    _F_encodeTypedPointer  = jit.Func(encodeTypedPointer)
-    _F_encodeJsonMarshaler = jit.Func(encodeJsonMarshaler)
-    _F_encodeTextMarshaler = jit.Func(encodeTextMarshaler)
-}
-
-func (self *_Assembler) _asm_OP_null(_ *_Instr) {
-    self.check_size(4)
-    self.Emit("MOVL", jit.Imm(_IM_null), jit.Sib(_RP, _RL, 1, 0)) // MOVL $'null', (RP)(RL*1)
-    self.Emit("ADDQ", jit.Imm(4), _RL)                            // ADDQ $4, RL
-}
-
-func (self *_Assembler) _asm_OP_empty_arr(_ *_Instr) {
-    self.Emit("BTQ", jit.Imm(int64(bitNoNullSliceOrMap)), _ARG_fv)
-    self.Sjmp("JC", "_empty_arr_{n}")
-    self._asm_OP_null(nil)
-    self.Sjmp("JMP", "_empty_arr_end_{n}")
-    self.Link("_empty_arr_{n}")
-    self.check_size(2)
-    self.Emit("MOVW", jit.Imm(_IM_array), jit.Sib(_RP, _RL, 1, 0))
-    self.Emit("ADDQ", jit.Imm(2), _RL)
-    self.Link("_empty_arr_end_{n}")
-}
-
-func (self *_Assembler) _asm_OP_empty_obj(_ *_Instr) {
-    self.Emit("BTQ", jit.Imm(int64(bitNoNullSliceOrMap)), _ARG_fv)
-    self.Sjmp("JC", "_empty_obj_{n}")
-    self._asm_OP_null(nil)
-    self.Sjmp("JMP", "_empty_obj_end_{n}")
-    self.Link("_empty_obj_{n}")
-    self.check_size(2)
-    self.Emit("MOVW", jit.Imm(_IM_object), jit.Sib(_RP, _RL, 1, 0))
-    self.Emit("ADDQ", jit.Imm(2), _RL)
-    self.Link("_empty_obj_end_{n}")
-}
-
-func (self *_Assembler) _asm_OP_bool(_ *_Instr) {
-    self.Emit("CMPB", jit.Ptr(_SP_p, 0), jit.Imm(0))              // CMPB (SP.p), $0
-    self.Sjmp("JE" , "_false_{n}")                                // JE _false_{n}
-    self.check_size(4)                                            // SIZE $4
-    self.Emit("MOVL", jit.Imm(_IM_true), jit.Sib(_RP, _RL, 1, 0)) // MOVL $'true', (RP)(RL*1)
-    self.Emit("ADDQ", jit.Imm(4), _RL)                            // ADDQ $4, RL
-    self.Sjmp("JMP" , "_end_{n}")                                 // JMP _end_{n}
-    self.Link("_false_{n}")                                       // _false_{n}:
-    self.check_size(5)                                            // SIZE $5
-    self.Emit("MOVL", jit.Imm(_IM_fals), jit.Sib(_RP, _RL, 1, 0)) // MOVL $'fals', (RP)(RL*1)
-    self.Emit("MOVB", jit.Imm('e'), jit.Sib(_RP, _RL, 1, 4))      // MOVB $'e', 4(RP)(RL*1)
-    self.Emit("ADDQ", jit.Imm(5), _RL)                            // ADDQ $5, RL
-    self.Link("_end_{n}")                                         // _end_{n}:
-}
-
-func (self *_Assembler) _asm_OP_i8(_ *_Instr) {
-    self.store_int(4, _F_i64toa, "MOVBQSX")
-}
-
-func (self *_Assembler) _asm_OP_i16(_ *_Instr) {
-    self.store_int(6, _F_i64toa, "MOVWQSX")
-}
-
-func (self *_Assembler) _asm_OP_i32(_ *_Instr) {
-    self.store_int(17, _F_i64toa, "MOVLQSX")
-}
-
-func (self *_Assembler) _asm_OP_i64(_ *_Instr) {
-    self.store_int(21, _F_i64toa, "MOVQ")
-}
-
-func (self *_Assembler) _asm_OP_u8(_ *_Instr) {
-    self.store_int(3, _F_u64toa, "MOVBQZX")
-}
-
-func (self *_Assembler) _asm_OP_u16(_ *_Instr) {
-    self.store_int(5, _F_u64toa, "MOVWQZX")
-}
-
-func (self *_Assembler) _asm_OP_u32(_ *_Instr) {
-    self.store_int(16, _F_u64toa, "MOVLQZX")
-}
-
-func (self *_Assembler) _asm_OP_u64(_ *_Instr) {
-    self.store_int(20, _F_u64toa, "MOVQ")
-}
-
-func (self *_Assembler) _asm_OP_f32(_ *_Instr) {
-    self.check_size(32)
-    self.Emit("MOVL" , jit.Ptr(_SP_p, 0), _AX)       // MOVL (SP.p), AX
-    self.Emit("ANDL" , jit.Imm(_FM_exp32), _AX)      // ANDL $_FM_exp32, AX
-    self.Emit("XORL" , jit.Imm(_FM_exp32), _AX)      // XORL $_FM_exp32, AX
-    self.Sjmp("JZ"   , _LB_error_nan_or_infinite)    // JZ _error_nan_or_infinite
-    self.save_c()                                    // SAVE $C_regs
-    self.rbuf_di()                                   // MOVQ RP, DI
-    self.Emit("MOVSS" , jit.Ptr(_SP_p, 0), _X0)      // MOVSS (SP.p), X0
-    self.call_c(_F_f32toa)                           // CALL_C f64toa
-    self.Emit("ADDQ" , _AX, _RL)                     // ADDQ AX, RL
-}
-
-func (self *_Assembler) _asm_OP_f64(_ *_Instr) {
-    self.check_size(32)
-    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX)       // MOVQ (SP.p), AX
-    self.Emit("MOVQ" , jit.Imm(_FM_exp64), _CX)      // MOVQ $_FM_exp64, CX
-    self.Emit("ANDQ" , _CX, _AX)                     // ANDQ CX, AX
-    self.Emit("XORQ" , _CX, _AX)                     // XORQ CX, AX
-    self.Sjmp("JZ"   , _LB_error_nan_or_infinite)    // JZ _error_nan_or_infinite
-    self.save_c()                                    // SAVE $C_regs
-    self.rbuf_di()                                   // MOVQ RP, DI
-    self.Emit("MOVSD" , jit.Ptr(_SP_p, 0), _X0)      // MOVSD (SP.p), X0
-    self.call_c(_F_f64toa)                           // CALL_C f64toa
-    self.Emit("ADDQ" , _AX, _RL)                     // ADDQ AX, RL
-}
-
-func (self *_Assembler) _asm_OP_str(_ *_Instr) {
-    self.encode_string(false)
-}
-
-func (self *_Assembler) _asm_OP_bin(_ *_Instr) {
-    self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _AX)        // MOVQ 8(SP.p), AX
-    self.Emit("ADDQ", jit.Imm(2), _AX)               // ADDQ $2, AX
-    self.Emit("MOVQ", jit.Imm(_IM_mulv), _CX)        // MOVQ $_MF_mulv, CX
-    self.Emit("MOVQ", _DX, _BX)                      // MOVQ DX, BX
-    self.From("MULQ", _CX)                           // MULQ CX
-    self.Emit("LEAQ", jit.Sib(_DX, _DX, 1, 1), _AX)  // LEAQ 1(DX)(DX), AX
-    self.Emit("ORQ" , jit.Imm(2), _AX)               // ORQ $2, AX
-    self.Emit("MOVQ", _BX, _DX)                      // MOVQ BX, DX
-    self.check_size_r(_AX, 0)                        // SIZE AX
-    self.add_char('"')                               // CHAR $'"'
-    self.Emit("MOVQ", _ARG_rb, _DI)                  // MOVQ rb<>+0(FP), DI
-    self.Emit("MOVQ", _RL, jit.Ptr(_DI, 8))          // MOVQ SI, 8(DI)
-    self.Emit("MOVQ", _SP_p, _SI)                    // MOVQ SP.p, SI
-
-    /* check for AVX2 support */
-    if !cpu.HasAVX2 {
-        self.Emit("XORL", _DX, _DX)                  // XORL DX, DX
-    } else {
-        self.Emit("MOVL", jit.Imm(_MODE_AVX2), _DX)  // MOVL $_MODE_AVX2, DX
-    }
-
-    /* call the encoder */
-    self.call_b64(_F_b64encode)                      // CALL b64encode
-    self.load_buffer_AX()                            // LOAD {buf}
-    self.add_char('"')                               // CHAR $'"'
-}
-
-func (self *_Assembler) _asm_OP_quote(_ *_Instr) {
-    self.encode_string(true)
-}
-
-func (self *_Assembler) _asm_OP_number(_ *_Instr) {
-    self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _BX)       // MOVQ (SP.p), BX
-    self.Emit("TESTQ", _BX, _BX)                     // TESTQ BX, BX
-    self.Sjmp("JZ"   , "_empty_{n}")
-    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX)       // MOVQ (SP.p), AX
-    self.Emit("TESTQ", _AX, _AX)                     // TESTQ AX, AX
-    self.Sjmp("JNZ"  , "_number_next_{n}")
-    self.Emit("MOVQ", jit.Imm(int64(panicNilPointerOfNonEmptyString)), _AX)
-    self.Sjmp("JMP", _LB_panic)
-    self.Link("_number_next_{n}")
-    self.call_go(_F_isValidNumber)                   // CALL_GO isValidNumber
-    self.Emit("CMPB" , _AX, jit.Imm(0))              // CMPB AX, $0
-    self.Sjmp("JE"   , _LB_error_invalid_number)     // JE _error_invalid_number
-    self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _BX)       // MOVQ (SP.p), BX
-    self.check_size_r(_BX, 0)                        // SIZE BX
-    self.Emit("LEAQ" , jit.Sib(_RP, _RL, 1, 0), _AX) // LEAQ (RP)(RL), AX
-    self.Emit("ADDQ" , jit.Ptr(_SP_p, 8), _RL)       // ADDQ 8(SP.p), RL
-    self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _BX)        // MOVOU (SP.p), BX
-    self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _CX)        // MOVOU X0, 8(SP)
-    self.call_go(_F_memmove)                         // CALL_GO memmove
-    self.Emit("MOVQ", _ARG_rb, _AX)                  // MOVQ rb<>+0(FP), AX
-    self.Emit("MOVQ", _RL, jit.Ptr(_AX, 8))          // MOVQ RL, 8(AX)
-    self.Sjmp("JMP" , "_done_{n}")                   // JMP _done_{n}
-    self.Link("_empty_{n}")                          // _empty_{n}
-    self.check_size(1)                               // SIZE $1
-    self.add_char('0')                               // CHAR $'0'
-    self.Link("_done_{n}")                           // _done_{n}:
-}
-
-func (self *_Assembler) _asm_OP_eface(_ *_Instr) {
-    self.prep_buffer_AX()                            // MOVE {buf}, AX
-    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _BX)       // MOVQ (SP.p), BX
-    self.Emit("LEAQ" , jit.Ptr(_SP_p, 8), _CX)       // LEAQ 8(SP.p), CX
-    self.Emit("MOVQ" , _ST, _DI)                     // MOVQ ST, DI
-    self.Emit("MOVQ" , _ARG_fv, _SI)                 // MOVQ fv, AX
-    self.call_encoder(_F_encodeTypedPointer)         // CALL encodeTypedPointer
-    self.Emit("TESTQ", _ET, _ET)                     // TESTQ ET, ET
-    self.Sjmp("JNZ"  , _LB_error)                    // JNZ _error
-    self.load_buffer_AX()
-}
-
-func (self *_Assembler) _asm_OP_iface(_ *_Instr) {
-    self.prep_buffer_AX()                            // MOVE {buf}, AX
-    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _CX)       // MOVQ (SP.p), CX
-    self.Emit("MOVQ" , jit.Ptr(_CX, 8), _BX)         // MOVQ 8(CX), BX
-    self.Emit("LEAQ" , jit.Ptr(_SP_p, 8), _CX)       // LEAQ 8(SP.p), CX
-    self.Emit("MOVQ" , _ST, _DI)                     // MOVQ ST, DI
-    self.Emit("MOVQ" , _ARG_fv, _SI)                 // MOVQ fv, AX
-    self.call_encoder(_F_encodeTypedPointer)         // CALL encodeTypedPointer
-    self.Emit("TESTQ", _ET, _ET)                     // TESTQ ET, ET
-    self.Sjmp("JNZ"  , _LB_error)                    // JNZ _error
-    self.load_buffer_AX()
-}
-
-func (self *_Assembler) _asm_OP_byte(p *_Instr) {
-    self.check_size(1)
-    self.Emit("MOVB", jit.Imm(p.i64()), jit.Sib(_RP, _RL, 1, 0)) // MOVL p.vi(), (RP)(RL*1)
-    self.Emit("ADDQ", jit.Imm(1), _RL)                           // ADDQ $1, RL
-}
-
-func (self *_Assembler) _asm_OP_text(p *_Instr) {
-    self.check_size(len(p.vs()))  // SIZE ${len(p.vs())}
-    self.add_text(p.vs())         // TEXT ${p.vs()}
-}
-
-func (self *_Assembler) _asm_OP_deref(_ *_Instr) {
-    self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _SP_p)      // MOVQ (SP.p), SP.p
-}
-
-func (self *_Assembler) _asm_OP_index(p *_Instr) {
-    self.Emit("MOVQ", jit.Imm(p.i64()), _AX)         // MOVQ $p.vi(), AX
-    self.Emit("ADDQ", _AX, _SP_p)                    // ADDQ AX, SP.p
-}
-
-func (self *_Assembler) _asm_OP_load(_ *_Instr) {
-    self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX)              // MOVQ (ST), AX
-    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, -24), _SP_x)  // MOVQ -24(ST)(AX), SP.x
-    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, -8), _SP_p)   // MOVQ -8(ST)(AX), SP.p
-    self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 0), _SP_q)    // MOVQ (ST)(AX), SP.q
-}
-
-func (self *_Assembler) _asm_OP_save(_ *_Instr) {
-    self.save_state()
-}
-
-func (self *_Assembler) _asm_OP_drop(_ *_Instr) {
-    self.drop_state(_StateSize)
-}
-
-func (self *_Assembler) _asm_OP_drop_2(_ *_Instr) {
-    self.drop_state(_StateSize * 2)                  // DROP $(_StateSize * 2)
-    self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 56)) // MOVOU X0, 56(ST)(AX)
-}
-
-func (self *_Assembler) _asm_OP_recurse(p *_Instr) {
-    self.prep_buffer_AX()                            // MOVE {buf}, (SP)
-    vt, pv := p.vp()
-    self.Emit("MOVQ", jit.Type(vt), _BX)             // MOVQ $(type(p.vt())), BX
-
-    /* check for indirection */
-    if !rt.UnpackType(vt).Indirect() {
-        self.Emit("MOVQ", _SP_p, _CX)                // MOVQ SP.p, CX
-    } else {
-        self.Emit("MOVQ", _SP_p, _VAR_vp)            // MOVQ SP.p, VAR.vp
-        self.Emit("LEAQ", _VAR_vp, _CX)              // LEAQ VAR.vp, CX
-    }
-
-    /* call the encoder */
-    self.Emit("MOVQ" , _ST, _DI)                     // MOVQ ST, DI
-    self.Emit("MOVQ" , _ARG_fv, _SI)                 // MOVQ $fv, SI
-    if pv {
-        self.Emit("BTCQ", jit.Imm(bitPointerValue), _SI) // BTCQ $1, SI
-    }
-    self.call_encoder(_F_encodeTypedPointer)         // CALL encodeTypedPointer
-    self.Emit("TESTQ", _ET, _ET)                     // TESTQ ET, ET
-    self.Sjmp("JNZ" , _LB_error)                     // JNZ _error
-    self.load_buffer_AX()
-}
-
-func (self *_Assembler) _asm_OP_is_nil(p *_Instr) {
-    self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPQ (SP.p), $0
-    self.Xjmp("JE" , p.vi())                         // JE p.vi()
-}
-
-func (self *_Assembler) _asm_OP_is_nil_p1(p *_Instr) {
-    self.Emit("CMPQ", jit.Ptr(_SP_p, 8), jit.Imm(0)) // CMPQ 8(SP.p), $0
-    self.Xjmp("JE" , p.vi())                         // JE p.vi()
-}
-
-func (self *_Assembler) _asm_OP_is_zero_1(p *_Instr) {
-    self.Emit("CMPB", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPB (SP.p), $0
-    self.Xjmp("JE" , p.vi())                         // JE p.vi()
-}
-
-func (self *_Assembler) _asm_OP_is_zero_2(p *_Instr) {
-    self.Emit("CMPW", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPW (SP.p), $0
-    self.Xjmp("JE" , p.vi())                         // JE p.vi()
-}
-
-func (self *_Assembler) _asm_OP_is_zero_4(p *_Instr) {
-    self.Emit("CMPL", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPL (SP.p), $0
-    self.Xjmp("JE" , p.vi())                         // JE p.vi()
-}
-
-func (self *_Assembler) _asm_OP_is_zero_8(p *_Instr) {
-    self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPQ (SP.p), $0
-    self.Xjmp("JE" , p.vi())                         // JE p.vi()
-}
-
-func (self *_Assembler) _asm_OP_is_zero_map(p *_Instr) {
-    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX)       // MOVQ (SP.p), AX
-    self.Emit("TESTQ", _AX, _AX)                     // TESTQ AX, AX
-    self.Xjmp("JZ" , p.vi())                         // JZ p.vi()
-    self.Emit("CMPQ" , jit.Ptr(_AX, 0), jit.Imm(0))  // CMPQ (AX), $0
-    self.Xjmp("JE" , p.vi())                         // JE p.vi()
-}
-
-func (self *_Assembler) _asm_OP_goto(p *_Instr) {
-    self.Xjmp("JMP", p.vi())
-}
-
-func (self *_Assembler) _asm_OP_map_iter(p *_Instr) {
-    self.Emit("MOVQ" , jit.Type(p.vt()), _AX)        // MOVQ $p.vt(), AX
-    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _BX)       // MOVQ (SP.p), BX
-    self.Emit("MOVQ" , _ARG_fv, _CX)                 // MOVQ fv, CX
-    self.call_go(_F_iteratorStart)                   // CALL_GO iteratorStart
-    self.Emit("MOVQ" , _AX, _SP_q)                   // MOVQ AX, SP.q
-    self.Emit("MOVQ" , _BX, _ET)                     // MOVQ 32(SP), ET
-    self.Emit("MOVQ" , _CX, _EP)                     // MOVQ 40(SP), EP
-    self.Emit("TESTQ", _ET, _ET)                     // TESTQ ET, ET
-    self.Sjmp("JNZ" , _LB_error)                     // JNZ _error
-}
-
-func (self *_Assembler) _asm_OP_map_stop(_ *_Instr) {
-    self.Emit("MOVQ", _SP_q, _AX)                    // MOVQ SP.q, AX
-    self.call_go(_F_iteratorStop)                    // CALL_GO iteratorStop
-    self.Emit("XORL", _SP_q, _SP_q)                  // XORL SP.q, SP.q
-}
-
-func (self *_Assembler) _asm_OP_map_check_key(p *_Instr) {
-    self.Emit("MOVQ" , jit.Ptr(_SP_q, 0), _SP_p)     // MOVQ (SP.q), SP.p
-    self.Emit("TESTQ", _SP_p, _SP_p)                 // TESTQ SP.p, SP.p
-    self.Xjmp("JZ" , p.vi())                         // JNZ p.vi()
-}
-
-func (self *_Assembler) _asm_OP_map_write_key(p *_Instr) {
-    self.Emit("BTQ", jit.Imm(bitSortMapKeys), _ARG_fv)   // BTQ ${SortMapKeys}, fv
-    self.Sjmp("JNC", "_unordered_key_{n}")               // JNC _unordered_key_{n}
-    self.encode_string(false)                            // STR $false
-    self.Xjmp("JMP", p.vi())                             // JMP ${p.vi()}
-    self.Link("_unordered_key_{n}")                      // _unordered_key_{n}:
-}
-
-func (self *_Assembler) _asm_OP_map_value_next(_ *_Instr) {
-    self.Emit("MOVQ", jit.Ptr(_SP_q, 8), _SP_p)      // MOVQ 8(SP.q), SP.p
-    self.Emit("MOVQ", _SP_q, _AX)                    // MOVQ SP.q, AX
-    self.call_go(_F_iteratorNext)                    // CALL_GO iteratorNext
-}
-
-func (self *_Assembler) _asm_OP_slice_len(_ *_Instr) {
-    self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _SP_x)     // MOVQ 8(SP.p), SP.x
-    self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _SP_p)     // MOVQ (SP.p), SP.p
-    self.Emit("ORQ"  , jit.Imm(1 << _S_init), _SP_f) // ORQ $(1<<_S_init), SP.f
-}
-
-func (self *_Assembler) _asm_OP_slice_next(p *_Instr) {
-    self.Emit("TESTQ" , _SP_x, _SP_x)                        // TESTQ SP.x, SP.x
-    self.Xjmp("JZ"   , p.vi())                               // JZ p.vi()
-    self.Emit("SUBQ" , jit.Imm(1), _SP_x)                    // SUBQ $1, SP.x
-    self.Emit("BTRQ" , jit.Imm(_S_init), _SP_f)              // BTRQ $_S_init, SP.f
-    self.Emit("LEAQ" , jit.Ptr(_SP_p, int64(p.vlen())), _AX) // LEAQ $(p.vlen())(SP.p), AX
-    self.Emit("CMOVQCC", _AX, _SP_p)                         // CMOVQNC AX, SP.p
-}
-
-func (self *_Assembler) _asm_OP_marshal(p *_Instr) {
-    self.call_marshaler(_F_encodeJsonMarshaler, _T_json_Marshaler, p.vt())
-}
-
-func (self *_Assembler) _asm_OP_marshal_p(p *_Instr) {
-    if p.vk() != reflect.Ptr {
-        panic("marshal_p: invalid type")
-    } else {
-        self.call_marshaler_v(_F_encodeJsonMarshaler, _T_json_Marshaler, p.vt(), false)
-    }
-}
-
-func (self *_Assembler) _asm_OP_marshal_text(p *_Instr) {
-    self.call_marshaler(_F_encodeTextMarshaler, _T_encoding_TextMarshaler, p.vt())
-}
-
-func (self *_Assembler) _asm_OP_marshal_text_p(p *_Instr) {
-    if p.vk() != reflect.Ptr {
-        panic("marshal_text_p: invalid type")
-    } else {
-        self.call_marshaler_v(_F_encodeTextMarshaler, _T_encoding_TextMarshaler, p.vt(), false)
-    }
-}
-
-func (self *_Assembler) _asm_OP_cond_set(_ *_Instr) {
-    self.Emit("ORQ", jit.Imm(1 << _S_cond), _SP_f)   // ORQ $(1<<_S_cond), SP.f
-}
-
-func (self *_Assembler) _asm_OP_cond_testc(p *_Instr) {
-    self.Emit("BTRQ", jit.Imm(_S_cond), _SP_f)       // BTRQ $_S_cond, SP.f
-    self.Xjmp("JC" , p.vi())
-}
-
-func (self *_Assembler) print_gc(i int, p1 *_Instr, p2 *_Instr) {
-    self.Emit("MOVQ", jit.Imm(int64(p2.op())), _CX)  // MOVQ $(p2.op()), AX
-    self.Emit("MOVQ", jit.Imm(int64(p1.op())), _BX)  // MOVQ $(p1.op()), BX
-    self.Emit("MOVQ", jit.Imm(int64(i)), _AX)        // MOVQ $(i), CX
-    self.call_go(_F_println)
-}
-
-var (
-    _V_writeBarrier = jit.Imm(int64(uintptr(unsafe.Pointer(&_runtime_writeBarrier))))
-
-    _F_gcWriteBarrierAX = jit.Func(gcWriteBarrierAX)
-)
-
-func (self *_Assembler) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr) {
-    if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX {
-        panic("rec contains AX!")
-    }
-    self.Emit("MOVQ", _V_writeBarrier, _BX)
-    self.Emit("CMPL", jit.Ptr(_BX, 0), jit.Imm(0))
-    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
-    self.xsave(_DI)
-    self.Emit("MOVQ", ptr, _AX)
-    self.Emit("LEAQ", rec, _DI)
-    self.Emit("MOVQ", _F_gcWriteBarrierAX, _BX)  // MOVQ ${fn}, AX
-    self.Rjmp("CALL", _BX)
-    self.xload(_DI)
-    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
-    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
-    self.Emit("MOVQ", ptr, rec)
-    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
-}
\ No newline at end of file
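Reviewer's aside, not part of the patch: each deleted _asm_OP_* handler above reserves output space and then appends a fixed literal into the result buffer. A minimal pure-Go sketch of that pattern (helper names are illustrative, not sonic API):

package main

import "fmt"

// appendNull mirrors _asm_OP_null: the JIT code checks for 4 spare bytes,
// writes the literal 'null' with a single MOVL, and advances the length (RL);
// in plain Go, append does all three.
func appendNull(buf []byte) []byte {
	return append(buf, "null"...)
}

// appendBool mirrors _asm_OP_bool: branch on the value byte, then emit
// either the 4-byte 'true' or the 5-byte 'false' literal.
func appendBool(buf []byte, v bool) []byte {
	if v {
		return append(buf, "true"...)
	}
	return append(buf, "false"...)
}

func main() {
	buf := appendBool(appendNull(nil), true)
	fmt.Println(string(buf)) // nulltrue
}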
"github.com/bytedance/sonic/internal/encoder/vm" + "github.com/bytedance/sonic/internal/resolver" + "github.com/bytedance/sonic/internal/rt" + "github.com/bytedance/sonic/option" ) -type _Op uint8 - -const ( - _OP_null _Op = iota + 1 - _OP_empty_arr - _OP_empty_obj - _OP_bool - _OP_i8 - _OP_i16 - _OP_i32 - _OP_i64 - _OP_u8 - _OP_u16 - _OP_u32 - _OP_u64 - _OP_f32 - _OP_f64 - _OP_str - _OP_bin - _OP_quote - _OP_number - _OP_eface - _OP_iface - _OP_byte - _OP_text - _OP_deref - _OP_index - _OP_load - _OP_save - _OP_drop - _OP_drop_2 - _OP_recurse - _OP_is_nil - _OP_is_nil_p1 - _OP_is_zero_1 - _OP_is_zero_2 - _OP_is_zero_4 - _OP_is_zero_8 - _OP_is_zero_map - _OP_goto - _OP_map_iter - _OP_map_stop - _OP_map_check_key - _OP_map_write_key - _OP_map_value_next - _OP_slice_len - _OP_slice_next - _OP_marshal - _OP_marshal_p - _OP_marshal_text - _OP_marshal_text_p - _OP_cond_set - _OP_cond_testc -) - -const ( - _INT_SIZE = 32 << (^uint(0) >> 63) - _PTR_SIZE = 32 << (^uintptr(0) >> 63) - _PTR_BYTE = unsafe.Sizeof(uintptr(0)) -) - -const ( - _MAX_ILBUF = 100000 // cutoff at 100k of IL instructions - _MAX_FIELDS = 50 // cutoff at 50 fields struct -) - -var _OpNames = [256]string { - _OP_null : "null", - _OP_empty_arr : "empty_arr", - _OP_empty_obj : "empty_obj", - _OP_bool : "bool", - _OP_i8 : "i8", - _OP_i16 : "i16", - _OP_i32 : "i32", - _OP_i64 : "i64", - _OP_u8 : "u8", - _OP_u16 : "u16", - _OP_u32 : "u32", - _OP_u64 : "u64", - _OP_f32 : "f32", - _OP_f64 : "f64", - _OP_str : "str", - _OP_bin : "bin", - _OP_quote : "quote", - _OP_number : "number", - _OP_eface : "eface", - _OP_iface : "iface", - _OP_byte : "byte", - _OP_text : "text", - _OP_deref : "deref", - _OP_index : "index", - _OP_load : "load", - _OP_save : "save", - _OP_drop : "drop", - _OP_drop_2 : "drop_2", - _OP_recurse : "recurse", - _OP_is_nil : "is_nil", - _OP_is_nil_p1 : "is_nil_p1", - _OP_is_zero_1 : "is_zero_1", - _OP_is_zero_2 : "is_zero_2", - _OP_is_zero_4 : "is_zero_4", - _OP_is_zero_8 : "is_zero_8", - _OP_is_zero_map : "is_zero_map", - _OP_goto : "goto", - _OP_map_iter : "map_iter", - _OP_map_stop : "map_stop", - _OP_map_check_key : "map_check_key", - _OP_map_write_key : "map_write_key", - _OP_map_value_next : "map_value_next", - _OP_slice_len : "slice_len", - _OP_slice_next : "slice_next", - _OP_marshal : "marshal", - _OP_marshal_p : "marshal_p", - _OP_marshal_text : "marshal_text", - _OP_marshal_text_p : "marshal_text_p", - _OP_cond_set : "cond_set", - _OP_cond_testc : "cond_testc", -} - -func (self _Op) String() string { - if ret := _OpNames[self]; ret != "" { - return ret - } else { - return "" - } -} - -func _OP_int() _Op { - switch _INT_SIZE { - case 32: return _OP_i32 - case 64: return _OP_i64 - default: panic("unsupported int size") - } -} - -func _OP_uint() _Op { - switch _INT_SIZE { - case 32: return _OP_u32 - case 64: return _OP_u64 - default: panic("unsupported uint size") - } -} - -func _OP_uintptr() _Op { - switch _PTR_SIZE { - case 32: return _OP_u32 - case 64: return _OP_u64 - default: panic("unsupported pointer size") - } -} - -func _OP_is_zero_ints() _Op { - switch _INT_SIZE { - case 32: return _OP_is_zero_4 - case 64: return _OP_is_zero_8 - default: panic("unsupported integer size") - } -} - -type _Instr struct { - u uint64 // union {op: 8, _: 8, vi: 48}, vi maybe int or len(str) - p unsafe.Pointer // maybe GoString.Ptr, or *GoType -} - -func packOp(op _Op) uint64 { - return uint64(op) << 56 -} - -func newInsOp(op _Op) _Instr { - return _Instr{u: packOp(op)} -} - -func newInsVi(op _Op, vi int) _Instr { - return 
_Instr{u: packOp(op) | rt.PackInt(vi)} -} - -func newInsVs(op _Op, vs string) _Instr { - return _Instr { - u: packOp(op) | rt.PackInt(len(vs)), - p: (*rt.GoString)(unsafe.Pointer(&vs)).Ptr, - } -} - -func newInsVt(op _Op, vt reflect.Type) _Instr { - return _Instr { - u: packOp(op), - p: unsafe.Pointer(rt.UnpackType(vt)), - } -} - -func newInsVp(op _Op, vt reflect.Type, pv bool) _Instr { - i := 0 - if pv { - i = 1 - } - return _Instr { - u: packOp(op) | rt.PackInt(i), - p: unsafe.Pointer(rt.UnpackType(vt)), - } -} - -func (self _Instr) op() _Op { - return _Op(self.u >> 56) -} - -func (self _Instr) vi() int { - return rt.UnpackInt(self.u) -} - -func (self _Instr) vf() uint8 { - return (*rt.GoType)(self.p).KindFlags -} - -func (self _Instr) vs() (v string) { - (*rt.GoString)(unsafe.Pointer(&v)).Ptr = self.p - (*rt.GoString)(unsafe.Pointer(&v)).Len = self.vi() - return -} - -func (self _Instr) vk() reflect.Kind { - return (*rt.GoType)(self.p).Kind() -} - -func (self _Instr) vt() reflect.Type { - return (*rt.GoType)(self.p).Pack() -} - -func (self _Instr) vp() (vt reflect.Type, pv bool) { - return (*rt.GoType)(self.p).Pack(), rt.UnpackInt(self.u) == 1 -} - -func (self _Instr) i64() int64 { - return int64(self.vi()) -} - -func (self _Instr) vlen() int { - return int((*rt.GoType)(self.p).Size) -} - -func (self _Instr) isBranch() bool { - switch self.op() { - case _OP_goto : fallthrough - case _OP_is_nil : fallthrough - case _OP_is_nil_p1 : fallthrough - case _OP_is_zero_1 : fallthrough - case _OP_is_zero_2 : fallthrough - case _OP_is_zero_4 : fallthrough - case _OP_is_zero_8 : fallthrough - case _OP_map_check_key : fallthrough - case _OP_map_write_key : fallthrough - case _OP_slice_next : fallthrough - case _OP_cond_testc : return true - default : return false - } -} - -func (self _Instr) disassemble() string { - switch self.op() { - case _OP_byte : return fmt.Sprintf("%-18s%s", self.op().String(), strconv.QuoteRune(rune(self.vi()))) - case _OP_text : return fmt.Sprintf("%-18s%s", self.op().String(), strconv.Quote(self.vs())) - case _OP_index : return fmt.Sprintf("%-18s%d", self.op().String(), self.vi()) - case _OP_recurse : fallthrough - case _OP_map_iter : fallthrough - case _OP_marshal : fallthrough - case _OP_marshal_p : fallthrough - case _OP_marshal_text : fallthrough - case _OP_marshal_text_p : return fmt.Sprintf("%-18s%s", self.op().String(), self.vt()) - case _OP_goto : fallthrough - case _OP_is_nil : fallthrough - case _OP_is_nil_p1 : fallthrough - case _OP_is_zero_1 : fallthrough - case _OP_is_zero_2 : fallthrough - case _OP_is_zero_4 : fallthrough - case _OP_is_zero_8 : fallthrough - case _OP_is_zero_map : fallthrough - case _OP_cond_testc : fallthrough - case _OP_map_check_key : fallthrough - case _OP_map_write_key : return fmt.Sprintf("%-18sL_%d", self.op().String(), self.vi()) - case _OP_slice_next : return fmt.Sprintf("%-18sL_%d, %s", self.op().String(), self.vi(), self.vt()) - default : return self.op().String() - } -} - -type ( - _Program []_Instr -) - -func (self _Program) pc() int { - return len(self) -} - -func (self _Program) tag(n int) { - if n >= _MaxStack { - panic("type nesting too deep") - } -} - -func (self _Program) pin(i int) { - v := &self[i] - v.u &= 0xffff000000000000 - v.u |= rt.PackInt(self.pc()) -} - -func (self _Program) rel(v []int) { - for _, i := range v { - self.pin(i) - } -} - -func (self *_Program) add(op _Op) { - *self = append(*self, newInsOp(op)) -} - -func (self *_Program) key(op _Op) { - *self = append(*self, - newInsVi(_OP_byte, '"'), - 
newInsOp(op), - newInsVi(_OP_byte, '"'), - ) -} - -func (self *_Program) int(op _Op, vi int) { - *self = append(*self, newInsVi(op, vi)) -} - -func (self *_Program) str(op _Op, vs string) { - *self = append(*self, newInsVs(op, vs)) -} - -func (self *_Program) rtt(op _Op, vt reflect.Type) { - *self = append(*self, newInsVt(op, vt)) -} - -func (self *_Program) vp(op _Op, vt reflect.Type, pv bool) { - *self = append(*self, newInsVp(op, vt, pv)) -} - -func (self _Program) disassemble() string { - nb := len(self) - tab := make([]bool, nb + 1) - ret := make([]string, 0, nb + 1) - - /* prescan to get all the labels */ - for _, ins := range self { - if ins.isBranch() { - tab[ins.vi()] = true - } - } - - /* disassemble each instruction */ - for i, ins := range self { - if !tab[i] { - ret = append(ret, "\t" + ins.disassemble()) - } else { - ret = append(ret, fmt.Sprintf("L_%d:\n\t%s", i, ins.disassemble())) - } - } - - /* add the last label, if needed */ - if tab[nb] { - ret = append(ret, fmt.Sprintf("L_%d:", nb)) - } - - /* add an "end" indicator, and join all the strings */ - return strings.Join(append(ret, "\tend"), "\n") -} - -type _Compiler struct { - opts option.CompileOptions - pv bool - tab map[reflect.Type]bool - rec map[reflect.Type]uint8 -} - -func newCompiler() *_Compiler { - return &_Compiler { - opts: option.DefaultCompileOptions(), - tab: map[reflect.Type]bool{}, - rec: map[reflect.Type]uint8{}, - } -} - -func (self *_Compiler) apply(opts option.CompileOptions) *_Compiler { - self.opts = opts - if self.opts.RecursiveDepth > 0 { - self.rec = map[reflect.Type]uint8{} - } - return self -} - -func (self *_Compiler) rescue(ep *error) { - if val := recover(); val != nil { - if err, ok := val.(error); ok { - *ep = err - } else { - panic(val) - } - } -} - -func (self *_Compiler) compile(vt reflect.Type, pv bool) (ret _Program, err error) { - defer self.rescue(&err) - self.compileOne(&ret, 0, vt, pv) - return -} - -func (self *_Compiler) compileOne(p *_Program, sp int, vt reflect.Type, pv bool) { - if self.tab[vt] { - p.vp(_OP_recurse, vt, pv) - } else { - self.compileRec(p, sp, vt, pv) - } -} - -func (self *_Compiler) compileRec(p *_Program, sp int, vt reflect.Type, pv bool) { - pr := self.pv - pt := reflect.PtrTo(vt) - - /* check for addressable `json.Marshaler` with pointer receiver */ - if pv && pt.Implements(jsonMarshalerType) { - p.rtt(_OP_marshal_p, pt) - return - } - - /* check for `json.Marshaler` */ - if vt.Implements(jsonMarshalerType) { - self.compileMarshaler(p, _OP_marshal, vt, jsonMarshalerType) - return - } - - /* check for addressable `encoding.TextMarshaler` with pointer receiver */ - if pv && pt.Implements(encodingTextMarshalerType) { - p.rtt(_OP_marshal_text_p, pt) - return - } - - /* check for `encoding.TextMarshaler` */ - if vt.Implements(encodingTextMarshalerType) { - self.compileMarshaler(p, _OP_marshal_text, vt, encodingTextMarshalerType) - return - } - - /* enter the recursion, and compile the type */ - self.pv = pv - self.tab[vt] = true - self.compileOps(p, sp, vt) - - /* exit the recursion */ - self.pv = pr - delete(self.tab, vt) -} - -func (self *_Compiler) compileOps(p *_Program, sp int, vt reflect.Type) { - switch vt.Kind() { - case reflect.Bool : p.add(_OP_bool) - case reflect.Int : p.add(_OP_int()) - case reflect.Int8 : p.add(_OP_i8) - case reflect.Int16 : p.add(_OP_i16) - case reflect.Int32 : p.add(_OP_i32) - case reflect.Int64 : p.add(_OP_i64) - case reflect.Uint : p.add(_OP_uint()) - case reflect.Uint8 : p.add(_OP_u8) - case reflect.Uint16 : p.add(_OP_u16) - 
case reflect.Uint32 : p.add(_OP_u32) - case reflect.Uint64 : p.add(_OP_u64) - case reflect.Uintptr : p.add(_OP_uintptr()) - case reflect.Float32 : p.add(_OP_f32) - case reflect.Float64 : p.add(_OP_f64) - case reflect.String : self.compileString (p, vt) - case reflect.Array : self.compileArray (p, sp, vt.Elem(), vt.Len()) - case reflect.Interface : self.compileInterface (p, vt) - case reflect.Map : self.compileMap (p, sp, vt) - case reflect.Ptr : self.compilePtr (p, sp, vt.Elem()) - case reflect.Slice : self.compileSlice (p, sp, vt.Elem()) - case reflect.Struct : self.compileStruct (p, sp, vt) - default : panic (error_type(vt)) - } -} - -func (self *_Compiler) compileNil(p *_Program, sp int, vt reflect.Type, nil_op _Op, fn func(*_Program, int, reflect.Type)) { - x := p.pc() - p.add(_OP_is_nil) - fn(p, sp, vt) - e := p.pc() - p.add(_OP_goto) - p.pin(x) - p.add(nil_op) - p.pin(e) -} - -func (self *_Compiler) compilePtr(p *_Program, sp int, vt reflect.Type) { - self.compileNil(p, sp, vt, _OP_null, self.compilePtrBody) -} - -func (self *_Compiler) compilePtrBody(p *_Program, sp int, vt reflect.Type) { - p.tag(sp) - p.add(_OP_save) - p.add(_OP_deref) - self.compileOne(p, sp + 1, vt, true) - p.add(_OP_drop) -} - -func (self *_Compiler) compileMap(p *_Program, sp int, vt reflect.Type) { - self.compileNil(p, sp, vt, _OP_empty_obj, self.compileMapBody) -} - -func (self *_Compiler) compileMapBody(p *_Program, sp int, vt reflect.Type) { - p.tag(sp + 1) - p.int(_OP_byte, '{') - p.add(_OP_save) - p.rtt(_OP_map_iter, vt) - p.add(_OP_save) - i := p.pc() - p.add(_OP_map_check_key) - u := p.pc() - p.add(_OP_map_write_key) - self.compileMapBodyKey(p, vt.Key()) - p.pin(u) - p.int(_OP_byte, ':') - p.add(_OP_map_value_next) - self.compileOne(p, sp + 2, vt.Elem(), false) - j := p.pc() - p.add(_OP_map_check_key) - p.int(_OP_byte, ',') - v := p.pc() - p.add(_OP_map_write_key) - self.compileMapBodyKey(p, vt.Key()) - p.pin(v) - p.int(_OP_byte, ':') - p.add(_OP_map_value_next) - self.compileOne(p, sp + 2, vt.Elem(), false) - p.int(_OP_goto, j) - p.pin(i) - p.pin(j) - p.add(_OP_map_stop) - p.add(_OP_drop_2) - p.int(_OP_byte, '}') -} - -func (self *_Compiler) compileMapBodyKey(p *_Program, vk reflect.Type) { - if !vk.Implements(encodingTextMarshalerType) { - self.compileMapBodyTextKey(p, vk) - } else { - self.compileMapBodyUtextKey(p, vk) - } -} - -func (self *_Compiler) compileMapBodyTextKey(p *_Program, vk reflect.Type) { - switch vk.Kind() { - case reflect.Invalid : panic("map key is nil") - case reflect.Bool : p.key(_OP_bool) - case reflect.Int : p.key(_OP_int()) - case reflect.Int8 : p.key(_OP_i8) - case reflect.Int16 : p.key(_OP_i16) - case reflect.Int32 : p.key(_OP_i32) - case reflect.Int64 : p.key(_OP_i64) - case reflect.Uint : p.key(_OP_uint()) - case reflect.Uint8 : p.key(_OP_u8) - case reflect.Uint16 : p.key(_OP_u16) - case reflect.Uint32 : p.key(_OP_u32) - case reflect.Uint64 : p.key(_OP_u64) - case reflect.Uintptr : p.key(_OP_uintptr()) - case reflect.Float32 : p.key(_OP_f32) - case reflect.Float64 : p.key(_OP_f64) - case reflect.String : self.compileString(p, vk) - default : panic(error_type(vk)) - } -} - -func (self *_Compiler) compileMapBodyUtextKey(p *_Program, vk reflect.Type) { - if vk.Kind() != reflect.Ptr { - p.rtt(_OP_marshal_text, vk) - } else { - self.compileMapBodyUtextPtr(p, vk) - } -} - -func (self *_Compiler) compileMapBodyUtextPtr(p *_Program, vk reflect.Type) { - i := p.pc() - p.add(_OP_is_nil) - p.rtt(_OP_marshal_text, vk) - j := p.pc() - p.add(_OP_goto) - p.pin(i) - p.str(_OP_text, 
"\"\"") - p.pin(j) -} - -func (self *_Compiler) compileSlice(p *_Program, sp int, vt reflect.Type) { - self.compileNil(p, sp, vt, _OP_empty_arr, self.compileSliceBody) -} - -func (self *_Compiler) compileSliceBody(p *_Program, sp int, vt reflect.Type) { - if isSimpleByte(vt) { - p.add(_OP_bin) - } else { - self.compileSliceArray(p, sp, vt) - } -} - -func (self *_Compiler) compileSliceArray(p *_Program, sp int, vt reflect.Type) { - p.tag(sp) - p.int(_OP_byte, '[') - p.add(_OP_save) - p.add(_OP_slice_len) - i := p.pc() - p.rtt(_OP_slice_next, vt) - self.compileOne(p, sp + 1, vt, true) - j := p.pc() - p.rtt(_OP_slice_next, vt) - p.int(_OP_byte, ',') - self.compileOne(p, sp + 1, vt, true) - p.int(_OP_goto, j) - p.pin(i) - p.pin(j) - p.add(_OP_drop) - p.int(_OP_byte, ']') -} - -func (self *_Compiler) compileArray(p *_Program, sp int, vt reflect.Type, nb int) { - p.tag(sp) - p.int(_OP_byte, '[') - p.add(_OP_save) - - /* first item */ - if nb != 0 { - self.compileOne(p, sp + 1, vt, self.pv) - p.add(_OP_load) - } - - /* remaining items */ - for i := 1; i < nb; i++ { - p.int(_OP_byte, ',') - p.int(_OP_index, i * int(vt.Size())) - self.compileOne(p, sp + 1, vt, self.pv) - p.add(_OP_load) - } - - /* end of array */ - p.add(_OP_drop) - p.int(_OP_byte, ']') -} - -func (self *_Compiler) compileString(p *_Program, vt reflect.Type) { - if vt != jsonNumberType { - p.add(_OP_str) - } else { - p.add(_OP_number) - } -} - -func (self *_Compiler) compileStruct(p *_Program, sp int, vt reflect.Type) { - if sp >= self.opts.MaxInlineDepth || p.pc() >= _MAX_ILBUF || (sp > 0 && vt.NumField() >= _MAX_FIELDS) { - p.vp(_OP_recurse, vt, self.pv) - if self.opts.RecursiveDepth > 0 { - if self.pv { - self.rec[vt] = 1 - } else { - self.rec[vt] = 0 - } - } - } else { - self.compileStructBody(p, sp, vt) - } -} - -func (self *_Compiler) compileStructBody(p *_Program, sp int, vt reflect.Type) { - p.tag(sp) - p.int(_OP_byte, '{') - p.add(_OP_save) - p.add(_OP_cond_set) - - /* compile each field */ - for _, fv := range resolver.ResolveStruct(vt) { - var s []int - var o resolver.Offset - - /* "omitempty" for arrays */ - if fv.Type.Kind() == reflect.Array { - if fv.Type.Len() == 0 && (fv.Opts & resolver.F_omitempty) != 0 { - continue - } - } - - /* index to the field */ - for _, o = range fv.Path { - if p.int(_OP_index, int(o.Size)); o.Kind == resolver.F_deref { - s = append(s, p.pc()) - p.add(_OP_is_nil) - p.add(_OP_deref) - } - } - - /* check for "omitempty" option */ - if fv.Type.Kind() != reflect.Struct && fv.Type.Kind() != reflect.Array && (fv.Opts & resolver.F_omitempty) != 0 { - s = append(s, p.pc()) - self.compileStructFieldZero(p, fv.Type) - } - - /* add the comma if not the first element */ - i := p.pc() - p.add(_OP_cond_testc) - p.int(_OP_byte, ',') - p.pin(i) - - /* compile the key and value */ - ft := fv.Type - p.str(_OP_text, Quote(fv.Name) + ":") - - /* check for "stringnize" option */ - if (fv.Opts & resolver.F_stringize) == 0 { - self.compileOne(p, sp + 1, ft, self.pv) - } else { - self.compileStructFieldStr(p, sp + 1, ft) - } - - /* patch the skipping jumps and reload the struct pointer */ - p.rel(s) - p.add(_OP_load) - } - - /* end of object */ - p.add(_OP_drop) - p.int(_OP_byte, '}') -} - -func (self *_Compiler) compileStructFieldStr(p *_Program, sp int, vt reflect.Type) { - pc := -1 - ft := vt - sv := false - - /* dereference the pointer if needed */ - if ft.Kind() == reflect.Ptr { - ft = ft.Elem() - } - - /* check if it can be stringized */ - switch ft.Kind() { - case reflect.Bool : sv = true - case reflect.Int 
: sv = true - case reflect.Int8 : sv = true - case reflect.Int16 : sv = true - case reflect.Int32 : sv = true - case reflect.Int64 : sv = true - case reflect.Uint : sv = true - case reflect.Uint8 : sv = true - case reflect.Uint16 : sv = true - case reflect.Uint32 : sv = true - case reflect.Uint64 : sv = true - case reflect.Uintptr : sv = true - case reflect.Float32 : sv = true - case reflect.Float64 : sv = true - case reflect.String : sv = true - } - - /* if it's not, ignore the "string" and follow the regular path */ - if !sv { - self.compileOne(p, sp, vt, self.pv) - return - } - - /* dereference the pointer */ - if vt.Kind() == reflect.Ptr { - pc = p.pc() - vt = vt.Elem() - p.add(_OP_is_nil) - p.add(_OP_deref) - } - - /* special case of a double-quoted string */ - if ft != jsonNumberType && ft.Kind() == reflect.String { - p.add(_OP_quote) - } else { - self.compileStructFieldQuoted(p, sp, vt) - } - - /* the "null" case of the pointer */ - if pc != -1 { - e := p.pc() - p.add(_OP_goto) - p.pin(pc) - p.add(_OP_null) - p.pin(e) - } -} - -func (self *_Compiler) compileStructFieldZero(p *_Program, vt reflect.Type) { - switch vt.Kind() { - case reflect.Bool : p.add(_OP_is_zero_1) - case reflect.Int : p.add(_OP_is_zero_ints()) - case reflect.Int8 : p.add(_OP_is_zero_1) - case reflect.Int16 : p.add(_OP_is_zero_2) - case reflect.Int32 : p.add(_OP_is_zero_4) - case reflect.Int64 : p.add(_OP_is_zero_8) - case reflect.Uint : p.add(_OP_is_zero_ints()) - case reflect.Uint8 : p.add(_OP_is_zero_1) - case reflect.Uint16 : p.add(_OP_is_zero_2) - case reflect.Uint32 : p.add(_OP_is_zero_4) - case reflect.Uint64 : p.add(_OP_is_zero_8) - case reflect.Uintptr : p.add(_OP_is_nil) - case reflect.Float32 : p.add(_OP_is_zero_4) - case reflect.Float64 : p.add(_OP_is_zero_8) - case reflect.String : p.add(_OP_is_nil_p1) - case reflect.Interface : p.add(_OP_is_nil_p1) - case reflect.Map : p.add(_OP_is_zero_map) - case reflect.Ptr : p.add(_OP_is_nil) - case reflect.Slice : p.add(_OP_is_nil_p1) - default : panic(error_type(vt)) - } -} - -func (self *_Compiler) compileStructFieldQuoted(p *_Program, sp int, vt reflect.Type) { - p.int(_OP_byte, '"') - self.compileOne(p, sp, vt, self.pv) - p.int(_OP_byte, '"') -} - -func (self *_Compiler) compileInterface(p *_Program, vt reflect.Type) { - x := p.pc() - p.add(_OP_is_nil_p1) - - /* iface and efaces are different */ - if vt.NumMethod() == 0 { - p.add(_OP_eface) - } else { - p.add(_OP_iface) - } - - /* the "null" value */ - e := p.pc() - p.add(_OP_goto) - p.pin(x) - p.add(_OP_null) - p.pin(e) -} - -func (self *_Compiler) compileMarshaler(p *_Program, op _Op, vt reflect.Type, mt reflect.Type) { - pc := p.pc() - vk := vt.Kind() - - /* direct receiver */ - if vk != reflect.Ptr { - p.rtt(op, vt) - return - } - - /* value receiver with a pointer type, check for nil before calling the marshaler */ - p.add(_OP_is_nil) - p.rtt(op, vt) - i := p.pc() - p.add(_OP_goto) - p.pin(pc) - p.add(_OP_null) - p.pin(i) +func ForceUseVM() { + vm.SetCompiler(makeEncoderVM) + pretouchType = pretouchTypeVM + encodeTypedPointer = vm.EncodeTypedPointer + vars.UseVM = true +} + +var encodeTypedPointer func(buf *[]byte, vt *rt.GoType, vp *unsafe.Pointer, sb *vars.Stack, fv uint64) error + +func makeEncoderVM(vt *rt.GoType, ex ...interface{}) (interface{}, error) { + pp, err := NewCompiler().Compile(vt.Pack(), ex[0].(bool)) + if err != nil { + return nil, err + } + return &pp, nil +} + +var pretouchType func(_vt reflect.Type, opts option.CompileOptions, v uint8) (map[reflect.Type]uint8, error) + +func 
+    /* compile function */
+    compiler := NewCompiler().apply(opts)
+
+    /* find or compile */
+    vt := rt.UnpackType(_vt)
+    if val := vars.GetProgram(vt); val != nil {
+        return nil, nil
+    } else if _, err := vars.ComputeProgram(vt, makeEncoderVM, v == 1); err == nil {
+        return compiler.rec, nil
+    } else {
+        return nil, err
+    }
+}
+
+func pretouchRec(vtm map[reflect.Type]uint8, opts option.CompileOptions) error {
+    if opts.RecursiveDepth < 0 || len(vtm) == 0 {
+        return nil
+    }
+    next := make(map[reflect.Type]uint8)
+    for vt, v := range vtm {
+        sub, err := pretouchType(vt, opts, v)
+        if err != nil {
+            return err
+        }
+        for svt, v := range sub {
+            next[svt] = v
+        }
+    }
+    opts.RecursiveDepth -= 1
+    return pretouchRec(next, opts)
+}
+
+type Compiler struct {
+    opts option.CompileOptions
+    pv   bool
+    tab  map[reflect.Type]bool
+    rec  map[reflect.Type]uint8
+}
+
+func NewCompiler() *Compiler {
+    return &Compiler{
+        opts: option.DefaultCompileOptions(),
+        tab:  map[reflect.Type]bool{},
+        rec:  map[reflect.Type]uint8{},
+    }
+}
+
+func (self *Compiler) apply(opts option.CompileOptions) *Compiler {
+    self.opts = opts
+    if self.opts.RecursiveDepth > 0 {
+        self.rec = map[reflect.Type]uint8{}
+    }
+    return self
+}
+
+func (self *Compiler) rescue(ep *error) {
+    if val := recover(); val != nil {
+        if err, ok := val.(error); ok {
+            *ep = err
+        } else {
+            panic(val)
+        }
+    }
+}
+
+func (self *Compiler) Compile(vt reflect.Type, pv bool) (ret ir.Program, err error) {
+    defer self.rescue(&err)
+    self.compileOne(&ret, 0, vt, pv)
+    return
+}
+
+func (self *Compiler) compileOne(p *ir.Program, sp int, vt reflect.Type, pv bool) {
+    if self.tab[vt] {
+        p.Vp(ir.OP_recurse, vt, pv)
+    } else {
+        self.compileRec(p, sp, vt, pv)
+    }
+}
+
+func (self *Compiler) tryCompileMarshaler(p *ir.Program, vt reflect.Type, pv bool) bool {
+    pt := reflect.PtrTo(vt)
+
+    /* check for addressable `json.Marshaler` with pointer receiver */
+    if pv && pt.Implements(vars.JsonMarshalerType) {
+        addMarshalerOp(p, ir.OP_marshal_p, pt, vars.JsonMarshalerType)
+        return true
+    }
+
+    /* check for `json.Marshaler` */
+    if vt.Implements(vars.JsonMarshalerType) {
+        self.compileMarshaler(p, ir.OP_marshal, vt, vars.JsonMarshalerType)
+        return true
+    }
+
+    /* check for addressable `encoding.TextMarshaler` with pointer receiver */
+    if pv && pt.Implements(vars.EncodingTextMarshalerType) {
+        addMarshalerOp(p, ir.OP_marshal_text_p, pt, vars.EncodingTextMarshalerType)
+        return true
+    }
+
+    /* check for `encoding.TextMarshaler` */
+    if vt.Implements(vars.EncodingTextMarshalerType) {
+        self.compileMarshaler(p, ir.OP_marshal_text, vt, vars.EncodingTextMarshalerType)
+        return true
+    }
+
+    return false
+}
+
+func (self *Compiler) compileRec(p *ir.Program, sp int, vt reflect.Type, pv bool) {
+    pr := self.pv
+
+    if self.tryCompileMarshaler(p, vt, pv) {
+        return
+    }
+
+    /* enter the recursion, and compile the type */
+    self.pv = pv
+    self.tab[vt] = true
+    self.compileOps(p, sp, vt)
+
+    /* exit the recursion */
+    self.pv = pr
+    delete(self.tab, vt)
+}
+
+func (self *Compiler) compileOps(p *ir.Program, sp int, vt reflect.Type) {
+    switch vt.Kind() {
+    case reflect.Bool:
+        p.Add(ir.OP_bool)
+    case reflect.Int:
+        p.Add(ir.OP_int())
+    case reflect.Int8:
+        p.Add(ir.OP_i8)
+    case reflect.Int16:
+        p.Add(ir.OP_i16)
+    case reflect.Int32:
+        p.Add(ir.OP_i32)
+    case reflect.Int64:
+        p.Add(ir.OP_i64)
+    case reflect.Uint:
+        p.Add(ir.OP_uint())
+    case reflect.Uint8:
+        p.Add(ir.OP_u8)
+    case reflect.Uint16:
+        p.Add(ir.OP_u16)
+    case reflect.Uint32:
+        p.Add(ir.OP_u32)
+    case reflect.Uint64:
+        p.Add(ir.OP_u64)
+    case reflect.Uintptr:
+        p.Add(ir.OP_uintptr())
+    case reflect.Float32:
+        p.Add(ir.OP_f32)
+    case reflect.Float64:
+        p.Add(ir.OP_f64)
+    case reflect.String:
+        self.compileString(p, vt)
+    case reflect.Array:
+        self.compileArray(p, sp, vt.Elem(), vt.Len())
+    case reflect.Interface:
+        self.compileInterface(p, vt)
+    case reflect.Map:
+        self.compileMap(p, sp, vt)
+    case reflect.Ptr:
+        self.compilePtr(p, sp, vt.Elem())
+    case reflect.Slice:
+        self.compileSlice(p, sp, vt.Elem())
+    case reflect.Struct:
+        self.compileStruct(p, sp, vt)
+    default:
+        panic(vars.Error_type(vt))
+    }
+}
+
+func (self *Compiler) compileNil(p *ir.Program, sp int, vt reflect.Type, nil_op ir.Op, fn func(*ir.Program, int, reflect.Type)) {
+    x := p.PC()
+    p.Add(ir.OP_is_nil)
+    fn(p, sp, vt)
+    e := p.PC()
+    p.Add(ir.OP_goto)
+    p.Pin(x)
+    p.Add(nil_op)
+    p.Pin(e)
+}
+
+func (self *Compiler) compilePtr(p *ir.Program, sp int, vt reflect.Type) {
+    self.compileNil(p, sp, vt, ir.OP_null, self.compilePtrBody)
+}
+
+func (self *Compiler) compilePtrBody(p *ir.Program, sp int, vt reflect.Type) {
+    p.Tag(sp)
+    p.Add(ir.OP_save)
+    p.Add(ir.OP_deref)
+    self.compileOne(p, sp+1, vt, true)
+    p.Add(ir.OP_drop)
+}
+
+func (self *Compiler) compileMap(p *ir.Program, sp int, vt reflect.Type) {
+    self.compileNil(p, sp, vt, ir.OP_empty_obj, self.compileMapBody)
+}
+
+func (self *Compiler) compileMapBody(p *ir.Program, sp int, vt reflect.Type) {
+    p.Tag(sp + 1)
+    p.Int(ir.OP_byte, '{')
+    e := p.PC()
+    p.Add(ir.OP_is_zero_map)
+    p.Add(ir.OP_save)
+    p.Rtt(ir.OP_map_iter, vt)
+    p.Add(ir.OP_save)
+    i := p.PC()
+    p.Add(ir.OP_map_check_key)
+    u := p.PC()
+    p.Add(ir.OP_map_write_key)
+    self.compileMapBodyKey(p, vt.Key())
+    p.Pin(u)
+    p.Int(ir.OP_byte, ':')
+    p.Add(ir.OP_map_value_next)
+    self.compileOne(p, sp+2, vt.Elem(), false)
+    j := p.PC()
+    p.Add(ir.OP_map_check_key)
+    p.Int(ir.OP_byte, ',')
+    v := p.PC()
+    p.Add(ir.OP_map_write_key)
+    self.compileMapBodyKey(p, vt.Key())
+    p.Pin(v)
+    p.Int(ir.OP_byte, ':')
+    p.Add(ir.OP_map_value_next)
+    self.compileOne(p, sp+2, vt.Elem(), false)
+    p.Int(ir.OP_goto, j)
+    p.Pin(i)
+    p.Pin(j)
+    p.Add(ir.OP_map_stop)
+    p.Add(ir.OP_drop_2)
+    p.Pin(e)
+    p.Int(ir.OP_byte, '}')
+}
+
+func (self *Compiler) compileMapBodyKey(p *ir.Program, vk reflect.Type) {
+    if !vk.Implements(vars.EncodingTextMarshalerType) {
+        self.compileMapBodyTextKey(p, vk)
+    } else {
+        self.compileMapBodyUtextKey(p, vk)
+    }
+}
+
+func (self *Compiler) compileMapBodyTextKey(p *ir.Program, vk reflect.Type) {
+    switch vk.Kind() {
+    case reflect.Invalid:
+        panic("map key is nil")
+    case reflect.Bool:
+        p.Key(ir.OP_bool)
+    case reflect.Int:
+        p.Key(ir.OP_int())
+    case reflect.Int8:
+        p.Key(ir.OP_i8)
+    case reflect.Int16:
+        p.Key(ir.OP_i16)
+    case reflect.Int32:
+        p.Key(ir.OP_i32)
+    case reflect.Int64:
+        p.Key(ir.OP_i64)
+    case reflect.Uint:
+        p.Key(ir.OP_uint())
+    case reflect.Uint8:
+        p.Key(ir.OP_u8)
+    case reflect.Uint16:
+        p.Key(ir.OP_u16)
+    case reflect.Uint32:
+        p.Key(ir.OP_u32)
+    case reflect.Uint64:
+        p.Key(ir.OP_u64)
+    case reflect.Uintptr:
+        p.Key(ir.OP_uintptr())
+    case reflect.Float32:
+        p.Key(ir.OP_f32)
+    case reflect.Float64:
+        p.Key(ir.OP_f64)
+    case reflect.String:
+        self.compileString(p, vk)
+    default:
+        panic(vars.Error_type(vk))
+    }
+}
+
+func (self *Compiler) compileMapBodyUtextKey(p *ir.Program, vk reflect.Type) {
+    if vk.Kind() != reflect.Ptr {
+        addMarshalerOp(p, ir.OP_marshal_text, vk, vars.EncodingTextMarshalerType)
+    } else {
+        self.compileMapBodyUtextPtr(p, vk)
+    }
+}
+
+func (self *Compiler) compileMapBodyUtextPtr(p *ir.Program, vk reflect.Type) {
+    i := p.PC()
+    p.Add(ir.OP_is_nil)
+    addMarshalerOp(p, ir.OP_marshal_text, vk, vars.EncodingTextMarshalerType)
+    j := p.PC()
+    p.Add(ir.OP_goto)
+    p.Pin(i)
+    p.Str(ir.OP_text, "\"\"")
+    p.Pin(j)
+}
+
+func (self *Compiler) compileSlice(p *ir.Program, sp int, vt reflect.Type) {
+    self.compileNil(p, sp, vt, ir.OP_empty_arr, self.compileSliceBody)
+}
+
+func (self *Compiler) compileSliceBody(p *ir.Program, sp int, vt reflect.Type) {
+    if vars.IsSimpleByte(vt) {
+        p.Add(ir.OP_bin)
+    } else {
+        self.compileSliceArray(p, sp, vt)
+    }
+}
+
+func (self *Compiler) compileSliceArray(p *ir.Program, sp int, vt reflect.Type) {
+    p.Tag(sp)
+    p.Int(ir.OP_byte, '[')
+    e := p.PC()
+    p.Add(ir.OP_is_nil)
+    p.Add(ir.OP_save)
+    p.Add(ir.OP_slice_len)
+    i := p.PC()
+    p.Rtt(ir.OP_slice_next, vt)
+    self.compileOne(p, sp+1, vt, true)
+    j := p.PC()
+    p.Rtt(ir.OP_slice_next, vt)
+    p.Int(ir.OP_byte, ',')
+    self.compileOne(p, sp+1, vt, true)
+    p.Int(ir.OP_goto, j)
+    p.Pin(i)
+    p.Pin(j)
+    p.Add(ir.OP_drop)
+    p.Pin(e)
+    p.Int(ir.OP_byte, ']')
+}
+
+func (self *Compiler) compileArray(p *ir.Program, sp int, vt reflect.Type, nb int) {
+    p.Tag(sp)
+    p.Int(ir.OP_byte, '[')
+    p.Add(ir.OP_save)
+
+    /* first item */
+    if nb != 0 {
+        self.compileOne(p, sp+1, vt, self.pv)
+        p.Add(ir.OP_load)
+    }
+
+    /* remaining items */
+    for i := 1; i < nb; i++ {
+        p.Int(ir.OP_byte, ',')
+        p.Int(ir.OP_index, i*int(vt.Size()))
+        self.compileOne(p, sp+1, vt, self.pv)
+        p.Add(ir.OP_load)
+    }
+
+    /* end of array */
+    p.Add(ir.OP_drop)
+    p.Int(ir.OP_byte, ']')
+}
+
+func (self *Compiler) compileString(p *ir.Program, vt reflect.Type) {
+    if vt != vars.JsonNumberType {
+        p.Add(ir.OP_str)
+    } else {
+        p.Add(ir.OP_number)
+    }
+}
+
+func (self *Compiler) compileStruct(p *ir.Program, sp int, vt reflect.Type) {
+    if sp >= self.opts.MaxInlineDepth || p.PC() >= vars.MAX_ILBUF || (sp > 0 && vt.NumField() >= vars.MAX_FIELDS) {
+        p.Vp(ir.OP_recurse, vt, self.pv)
+        if self.opts.RecursiveDepth > 0 {
+            if self.pv {
+                self.rec[vt] = 1
+            } else {
+                self.rec[vt] = 0
+            }
+        }
+    } else {
+        self.compileStructBody(p, sp, vt)
+    }
+}
+
+func (self *Compiler) compileStructBody(p *ir.Program, sp int, vt reflect.Type) {
+    p.Tag(sp)
+    p.Int(ir.OP_byte, '{')
+    p.Add(ir.OP_save)
+    p.Add(ir.OP_cond_set)
+
+    /* compile each field */
+    for _, fv := range resolver.ResolveStruct(vt) {
+        var s []int
+        var o resolver.Offset
+
+        /* "omitempty" for arrays */
+        if fv.Type.Kind() == reflect.Array {
+            if fv.Type.Len() == 0 && (fv.Opts&resolver.F_omitempty) != 0 {
+                continue
+            }
+        }
+
+        /* index to the field */
+        for _, o = range fv.Path {
+            if p.Int(ir.OP_index, int(o.Size)); o.Kind == resolver.F_deref {
+                s = append(s, p.PC())
+                p.Add(ir.OP_is_nil)
+                p.Add(ir.OP_deref)
+            }
+        }
+
+        /* check for "omitempty" option */
+        if fv.Type.Kind() != reflect.Struct && fv.Type.Kind() != reflect.Array && (fv.Opts&resolver.F_omitempty) != 0 {
+            s = append(s, p.PC())
+            self.compileStructFieldZero(p, fv.Type)
+        }
+
+        /* add the comma if not the first element */
+        i := p.PC()
+        p.Add(ir.OP_cond_testc)
+        p.Int(ir.OP_byte, ',')
+        p.Pin(i)
+
+        /* compile the key and value */
+        ft := fv.Type
+        p.Str(ir.OP_text, Quote(fv.Name)+":")
+
+        /* check for "stringnize" option */
+        if (fv.Opts & resolver.F_stringize) == 0 {
+            self.compileOne(p, sp+1, ft, self.pv)
+        } else {
+            self.compileStructFieldStr(p, sp+1, ft)
+        }
+
+        /* patch the skipping jumps and reload the struct pointer */
+        p.Rel(s)
+        p.Add(ir.OP_load)
+    }
+
+    /* end of object */
+    p.Add(ir.OP_drop)
+    p.Int(ir.OP_byte, '}')
+}
+
+func (self *Compiler) compileStructFieldStr(p *ir.Program, sp int, vt reflect.Type) {
+    // NOTICE: according to encoding/json, Marshaler type has higher priority than string option
+    // see issue:
+    if self.tryCompileMarshaler(p, vt, self.pv) {
+        return
+    }
+
+    pc := -1
+    ft := vt
+    sv := false
+
+    /* dereference the pointer if needed */
+    if ft.Kind() == reflect.Ptr {
+        ft = ft.Elem()
+    }
+
+    /* check if it can be stringized */
+    switch ft.Kind() {
+    case reflect.Bool:
+        sv = true
+    case reflect.Int:
+        sv = true
+    case reflect.Int8:
+        sv = true
+    case reflect.Int16:
+        sv = true
+    case reflect.Int32:
+        sv = true
+    case reflect.Int64:
+        sv = true
+    case reflect.Uint:
+        sv = true
+    case reflect.Uint8:
+        sv = true
+    case reflect.Uint16:
+        sv = true
+    case reflect.Uint32:
+        sv = true
+    case reflect.Uint64:
+        sv = true
+    case reflect.Uintptr:
+        sv = true
+    case reflect.Float32:
+        sv = true
+    case reflect.Float64:
+        sv = true
+    case reflect.String:
+        sv = true
+    }
+
+    /* if it's not, ignore the "string" and follow the regular path */
+    if !sv {
+        self.compileOne(p, sp, vt, self.pv)
+        return
+    }
+
+    /* dereference the pointer */
+    if vt.Kind() == reflect.Ptr {
+        pc = p.PC()
+        vt = vt.Elem()
+        p.Add(ir.OP_is_nil)
+        p.Add(ir.OP_deref)
+    }
+
+    /* special case of a double-quoted string */
+    if ft != vars.JsonNumberType && ft.Kind() == reflect.String {
+        p.Add(ir.OP_quote)
+    } else {
+        self.compileStructFieldQuoted(p, sp, vt)
+    }
+
+    /* the "null" case of the pointer */
+    if pc != -1 {
+        e := p.PC()
+        p.Add(ir.OP_goto)
+        p.Pin(pc)
+        p.Add(ir.OP_null)
+        p.Pin(e)
+    }
+}
+
+func (self *Compiler) compileStructFieldZero(p *ir.Program, vt reflect.Type) {
+    switch vt.Kind() {
+    case reflect.Bool:
+        p.Add(ir.OP_is_zero_1)
+    case reflect.Int:
+        p.Add(ir.OP_is_zero_ints())
+    case reflect.Int8:
+        p.Add(ir.OP_is_zero_1)
+    case reflect.Int16:
+        p.Add(ir.OP_is_zero_2)
+    case reflect.Int32:
+        p.Add(ir.OP_is_zero_4)
+    case reflect.Int64:
+        p.Add(ir.OP_is_zero_8)
+    case reflect.Uint:
+        p.Add(ir.OP_is_zero_ints())
+    case reflect.Uint8:
+        p.Add(ir.OP_is_zero_1)
+    case reflect.Uint16:
+        p.Add(ir.OP_is_zero_2)
+    case reflect.Uint32:
+        p.Add(ir.OP_is_zero_4)
+    case reflect.Uint64:
+        p.Add(ir.OP_is_zero_8)
+    case reflect.Uintptr:
+        p.Add(ir.OP_is_nil)
+    case reflect.Float32:
+        p.Add(ir.OP_is_zero_4)
+    case reflect.Float64:
+        p.Add(ir.OP_is_zero_8)
+    case reflect.String:
+        p.Add(ir.OP_is_nil_p1)
+    case reflect.Interface:
+        p.Add(ir.OP_is_nil)
+    case reflect.Map:
+        p.Add(ir.OP_is_zero_map)
+    case reflect.Ptr:
+        p.Add(ir.OP_is_nil)
+    case reflect.Slice:
+        p.Add(ir.OP_is_nil_p1)
+    default:
+        panic(vars.Error_type(vt))
+    }
+}
+
+func (self *Compiler) compileStructFieldQuoted(p *ir.Program, sp int, vt reflect.Type) {
+    p.Int(ir.OP_byte, '"')
+    self.compileOne(p, sp, vt, self.pv)
+    p.Int(ir.OP_byte, '"')
+}
+
+func (self *Compiler) compileInterface(p *ir.Program, vt reflect.Type) {
+    x := p.PC()
+    p.Add(ir.OP_is_nil_p1)
+
+    /* iface and efaces are different */
+    if vt.NumMethod() == 0 {
+        p.Add(ir.OP_eface)
+    } else {
+        p.Add(ir.OP_iface)
+    }
+
+    /* the "null" value */
+    e := p.PC()
+    p.Add(ir.OP_goto)
+    p.Pin(x)
+    p.Add(ir.OP_null)
+    p.Pin(e)
+}
+
+func (self *Compiler) compileMarshaler(p *ir.Program, op ir.Op, vt reflect.Type, mt reflect.Type) {
+    pc := p.PC()
+    vk := vt.Kind()
+
+    /* direct receiver */
+    if vk != reflect.Ptr {
+        addMarshalerOp(p, op, vt, mt)
+        return
+    }
+    /* value receiver with a pointer type, check for nil before calling the marshaler */
+    p.Add(ir.OP_is_nil)
+
+    addMarshalerOp(p, op, vt, mt)
+
+    i := p.PC()
+    p.Add(ir.OP_goto)
+    p.Pin(pc)
+    p.Add(ir.OP_null)
+    p.Pin(i)
+}
+
+func addMarshalerOp(p *ir.Program, op ir.Op, vt reflect.Type, mt reflect.Type) {
+    if vars.UseVM {
+        itab := rt.GetItab(rt.IfaceType(rt.UnpackType(mt)), rt.UnpackType(vt), true)
+        p.Vtab(op, vt, itab)
+    } else {
+        // OPT: get itab here
+        p.Rtt(op, vt)
+    }
+}
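Reviewer's aside, not part of the patch: the compiler above emits forward branches before it knows their targets, then backpatches them (PC records the branch slot, Pin rewrites its target), as in compileNil. A self-contained sketch of that technique with illustrative types, not sonic's ir package:

package main

import "fmt"

type instr struct {
	op     string
	target int
}

type program []instr

func (p *program) pc() int       { return len(*p) }
func (p *program) add(op string) { *p = append(*p, instr{op: op}) }
func (p *program) pin(i int)     { (*p)[i].target = p.pc() } // backpatch branch i to jump here

func main() {
	var p program
	x := p.pc()
	p.add("is_nil") // forward branch, target unknown yet
	p.add("encode_value")
	e := p.pc()
	p.add("goto") // skip over the null case
	p.pin(x)      // is_nil now jumps to the null op ...
	p.add("null")
	p.pin(e) // ... and goto jumps past it
	for i, ins := range p {
		fmt.Println(i, ins.op, "->", ins.target)
	}
}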
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/debug_go117.go b/vendor/github.com/bytedance/sonic/internal/encoder/debug_go117.go
deleted file mode 100644
index e1016de323417f4cc6a10de823206a50b520f59e..0000000000000000000000000000000000000000
--- a/vendor/github.com/bytedance/sonic/internal/encoder/debug_go117.go
+++ /dev/null
@@ -1,205 +0,0 @@
-// +build go1.17,!go1.21
-
-/*
- * Copyright 2021 ByteDance Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package encoder
-
-import (
-    `fmt`
-    `os`
-    `runtime`
-    `strings`
-    `unsafe`
-
-    `github.com/bytedance/sonic/internal/jit`
-    `github.com/twitchyliquid64/golang-asm/obj`
-)
-
-const _FP_debug = 128
-
-var (
-    debugSyncGC   = os.Getenv("SONIC_SYNC_GC") != ""
-    debugAsyncGC  = os.Getenv("SONIC_NO_ASYNC_GC") == ""
-    debugCheckPtr = os.Getenv("SONIC_CHECK_POINTER") != ""
-)
-
-var (
-    _Instr_End = newInsOp(_OP_is_nil)
-
-    _F_gc      = jit.Func(gc)
-    _F_println = jit.Func(println_wrapper)
-    _F_print   = jit.Func(print)
-)
-
-func (self *_Assembler) dsave(r ...obj.Addr) {
-    for i, v := range r {
-        if i > _FP_debug / 8 - 1 {
-            panic("too many registers to save")
-        } else {
-            self.Emit("MOVQ", v, jit.Ptr(_SP, _FP_fargs + _FP_saves + _FP_locals + int64(i) * 8))
-        }
-    }
-}
-
-func (self *_Assembler) dload(r ...obj.Addr) {
-    for i, v := range r {
-        if i > _FP_debug / 8 - 1 {
-            panic("too many registers to load")
-        } else {
-            self.Emit("MOVQ", jit.Ptr(_SP, _FP_fargs + _FP_saves + _FP_locals + int64(i) * 8), v)
-        }
-    }
-}
-
-func println_wrapper(i int, op1 int, op2 int){
-    println(i, " Intrs ", op1, _OpNames[op1], "next: ", op2, _OpNames[op2])
-}
-
-func print(i int){
-    println(i)
-}
-
-func gc() {
-    if !debugSyncGC {
-        return
-    }
-    runtime.GC()
-    // debug.FreeOSMemory()
-}
-
-func (self *_Assembler) dcall(fn obj.Addr) {
-    self.Emit("MOVQ", fn, _R10)  // MOVQ ${fn}, R10
-    self.Rjmp("CALL", _R10)      // CALL R10
-}
-
-func (self *_Assembler) debug_gc() {
-    if !debugSyncGC {
-        return
-    }
-    self.dsave(_REG_debug...)
-    self.dcall(_F_gc)
-    self.dload(_REG_debug...)
-}
-
-func (self *_Assembler) debug_instr(i int, v *_Instr) {
-    if debugSyncGC {
-        if i+1 == len(self.p) {
-            self.print_gc(i, v, &_Instr_End)
-        } else {
-            next := &(self.p[i+1])
-            self.print_gc(i, v, next)
-            name := _OpNames[next.op()]
-            if strings.Contains(name, "save") {
-                return
-            }
-        }
-        // self.debug_gc()
-    }
-}
-
-//go:noescape
-//go:linkname checkptrBase runtime.checkptrBase
-func checkptrBase(p unsafe.Pointer) uintptr
-
-//go:noescape
-//go:linkname findObject runtime.findObject
-func findObject(p, refBase, refOff uintptr) (base uintptr, s unsafe.Pointer, objIndex uintptr)
-
-var (
-    _F_checkptr = jit.Func(checkptr)
-    _F_printptr = jit.Func(printptr)
-)
-
-var (
-    _R10 = jit.Reg("R10")
-)
-var _REG_debug = []obj.Addr {
-    jit.Reg("AX"),
-    jit.Reg("BX"),
-    jit.Reg("CX"),
-    jit.Reg("DX"),
-    jit.Reg("DI"),
-    jit.Reg("SI"),
-    jit.Reg("BP"),
-    jit.Reg("SP"),
-    jit.Reg("R8"),
-    jit.Reg("R9"),
-    jit.Reg("R10"),
-    jit.Reg("R11"),
-    jit.Reg("R12"),
-    jit.Reg("R13"),
-    jit.Reg("R14"),
-    jit.Reg("R15"),
-}
-
-func checkptr(ptr uintptr) {
-    if ptr == 0 {
-        return
-    }
-    fmt.Printf("pointer: %x\n", ptr)
-    f := checkptrBase(unsafe.Pointer(uintptr(ptr)))
-    if f == 0 {
-        fmt.Printf("! unknown-based pointer: %x\n", ptr)
-    } else if f == 1 {
-        fmt.Printf("! stack pointer: %x\n", ptr)
-    } else {
-        fmt.Printf("base: %x\n", f)
-    }
-    findobj(ptr)
-}
-
-func findobj(ptr uintptr) {
-    base, s, objIndex := findObject(ptr, 0, 0)
-    if s != nil && base == 0 {
-        fmt.Printf("! invalid pointer: %x\n", ptr)
-    }
-    fmt.Printf("objIndex: %d\n", objIndex)
-}
-
-func (self *_Assembler) check_ptr(ptr obj.Addr, lea bool) {
-    if !debugCheckPtr {
-        return
-    }
-
-    self.dsave(_REG_debug...)
-    if lea {
-        self.Emit("LEAQ", ptr, _R10)
-    } else {
-        self.Emit("MOVQ", ptr, _R10)
-    }
-    self.Emit("MOVQ", _R10, jit.Ptr(_SP, 0))
-    self.dcall(_F_checkptr)
-    self.dload(_REG_debug...)
-}
-
-func printptr(i int, ptr uintptr) {
-    fmt.Printf("[%d] ptr: %x\n", i, ptr)
-}
-
-func (self *_Assembler) print_ptr(i int, ptr obj.Addr, lea bool) {
-    self.dsave(_REG_debug...)
-    if lea {
-        self.Emit("LEAQ", ptr, _R10)
-    } else {
-        self.Emit("MOVQ", ptr, _R10)
-    }
-
-    self.Emit("MOVQ", jit.Imm(int64(i)), _AX)
-    self.Emit("MOVQ", _R10, _BX)
-    self.dcall(_F_printptr)
-    self.dload(_REG_debug...)
-}
\ No newline at end of file
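Reviewer's aside, not part of the patch: the deleted debug file gates all of its hooks on environment variables read once at startup. The same pattern in miniature (standalone sketch, not sonic code):

package main

import (
	"os"
	"runtime"
)

// debugSyncGC is read once at process start; gc() is a no-op unless
// SONIC_SYNC_GC is set, mirroring the gc() helper removed above.
var debugSyncGC = os.Getenv("SONIC_SYNC_GC") != ""

func gc() {
	if !debugSyncGC {
		return
	}
	runtime.GC()
}

func main() {
	gc() // forces a collection only when SONIC_SYNC_GC is set
}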
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/encode_norace.go b/vendor/github.com/bytedance/sonic/internal/encoder/encode_norace.go
new file mode 100644
index 0000000000000000000000000000000000000000..c53206433cf8bcdd3b007cd1ef670d6aee813118
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/encoder/encode_norace.go
@@ -0,0 +1,24 @@
+//go:build !race
+// +build !race
+
+/*
+ * Copyright 2021 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package encoder
+
+func encodeIntoCheckRace(buf *[]byte, val interface{}, opts Options) error {
+    return encodeInto(buf, val, opts)
+}
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/encode_race.go b/vendor/github.com/bytedance/sonic/internal/encoder/encode_race.go
new file mode 100644
index 0000000000000000000000000000000000000000..c373c55f942f459f8632bf3e51ec5093dd698358
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/encoder/encode_race.go
@@ -0,0 +1,54 @@
+//go:build race
+// +build race
+
+/*
+ * Copyright 2021 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package encoder
+
+import (
+    `encoding/json`
+
+    `github.com/bytedance/sonic/internal/rt`
+)
+
+
+func helpDetectDataRace(val interface{}) {
+    var out []byte
+    defer func() {
+        if v := recover(); v != nil {
+            // NOTICE: help user to locate where panic occurs
+            println("panic when encoding on: ", truncate(out))
+            panic(v)
+        }
+    }()
+    out, _ = json.Marshal(val)
+}
+
+func encodeIntoCheckRace(buf *[]byte, val interface{}, opts Options) error {
+    err := encodeInto(buf, val, opts)
+    /* put last to make the panic from sonic will always be caught at first */
+    helpDetectDataRace(val)
+    return err
+}
+
+func truncate(json []byte) string {
+    if len(json) <= 256 {
+        return rt.Mem2Str(json)
+    } else {
+        return rt.Mem2Str(json[len(json)-256:])
+    }
+}
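Reviewer's aside, not part of the patch: the two new files above are a build-tag pair, and `go build -race` selects the heavier implementation while normal builds get the cheap one. A two-file sketch of the pattern (package and identifier names here are illustrative, not sonic's API):

// --- file: check_norace.go ---
//go:build !race

package racecheck

// check is a no-op in normal builds.
func check(v interface{}) {}

// --- file: check_race.go ---
//go:build race

package racecheck

import "encoding/json"

// check re-marshals the value with encoding/json so the race detector walks
// the same data the fast encoder just read, mirroring helpDetectDataRace above.
func check(v interface{}) { _, _ = json.Marshal(v) }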
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/encoder.go b/vendor/github.com/bytedance/sonic/internal/encoder/encoder.go
index 3c46061a48035d76d8d3d0e61aea85f0b71ad3ac..4cba1a168a470885f171fa25fdb0d8dc7a130a31 100644
--- a/vendor/github.com/bytedance/sonic/internal/encoder/encoder.go
+++ b/vendor/github.com/bytedance/sonic/internal/encoder/encoder.go
@@ -17,63 +17,62 @@ package encoder
 
 import (
-    `bytes`
-    `encoding/json`
-    `reflect`
-    `runtime`
-    `unsafe`
-
-    `github.com/bytedance/sonic/internal/native`
-    `github.com/bytedance/sonic/internal/native/types`
-    `github.com/bytedance/sonic/internal/rt`
-    `github.com/bytedance/sonic/utf8`
-    `github.com/bytedance/sonic/option`
+    "bytes"
+    "encoding/json"
+    "reflect"
+    "runtime"
+    "unsafe"
+
+    "github.com/bytedance/sonic/utf8"
+    "github.com/bytedance/sonic/internal/encoder/alg"
+    "github.com/bytedance/sonic/internal/encoder/vars"
+    "github.com/bytedance/sonic/internal/rt"
+    "github.com/bytedance/sonic/option"
 )
 
 // Options is a set of encoding options.
 type Options uint64
 
-const (
-    bitSortMapKeys = iota
-    bitEscapeHTML
-    bitCompactMarshaler
-    bitNoQuoteTextMarshaler
-    bitNoNullSliceOrMap
-    bitValidateString
-
-    // used for recursive compile
-    bitPointerValue = 63
-)
-
 const (
     // SortMapKeys indicates that the keys of a map needs to be sorted
     // before serializing into JSON.
     // WARNING: This hurts performance A LOT, USE WITH CARE.
-    SortMapKeys Options = 1 << bitSortMapKeys
+    SortMapKeys Options = 1 << alg.BitSortMapKeys
 
     // EscapeHTML indicates encoder to escape all HTML characters
    // after serializing into JSON (see https://pkg.go.dev/encoding/json#HTMLEscape).
     // WARNING: This hurts performance A LOT, USE WITH CARE.
-    EscapeHTML Options = 1 << bitEscapeHTML
+    EscapeHTML Options = 1 << alg.BitEscapeHTML
 
     // CompactMarshaler indicates that the output JSON from json.Marshaler
     // is always compact and needs no validation
-    CompactMarshaler Options = 1 << bitCompactMarshaler
+    CompactMarshaler Options = 1 << alg.BitCompactMarshaler
 
     // NoQuoteTextMarshaler indicates that the output text from encoding.TextMarshaler
     // is always escaped string and needs no quoting
-    NoQuoteTextMarshaler Options = 1 << bitNoQuoteTextMarshaler
+    NoQuoteTextMarshaler Options = 1 << alg.BitNoQuoteTextMarshaler
 
     // NoNullSliceOrMap indicates all empty Array or Object are encoded as '[]' or '{}',
-    // instead of 'null'
-    NoNullSliceOrMap Options = 1 << bitNoNullSliceOrMap
+    // instead of 'null'.
+    // NOTE: The priority of this option is lower than json tag `omitempty`.
+    NoNullSliceOrMap Options = 1 << alg.BitNoNullSliceOrMap
 
     // ValidateString indicates that encoder should validate the input string
     // before encoding it into JSON.
-    ValidateString Options = 1 << bitValidateString
+    ValidateString Options = 1 << alg.BitValidateString
+
+    // NoValidateJSONMarshaler indicates that the encoder should not validate the output string
+    // after encoding the JSONMarshaler to JSON.
+    NoValidateJSONMarshaler Options = 1 << alg.BitNoValidateJSONMarshaler
+
+    // NoEncoderNewline indicates that the encoder should not add a newline after every message
+    NoEncoderNewline Options = 1 << alg.BitNoEncoderNewline
 
     // CompatibleWithStd is used to be compatible with std encoder.
     CompatibleWithStd Options = SortMapKeys | EscapeHTML | CompactMarshaler
+
+    // Encode Infinity or Nan float into `null`, instead of returning an error.
+    EncodeNullForInfOrNan Options = 1 << alg.BitEncodeNullForInfOrNan
 )
 
 // Encoder represents a specific set of encoder configurations.
@@ -115,6 +114,25 @@ func (self *Encoder) SetValidateString(f bool) {
     }
 }
 
+// SetNoValidateJSONMarshaler specifies if option NoValidateJSONMarshaler opens
+func (self *Encoder) SetNoValidateJSONMarshaler(f bool) {
+    if f {
+        self.Opts |= NoValidateJSONMarshaler
+    } else {
+        self.Opts &= ^NoValidateJSONMarshaler
+    }
+}
+
+// SetNoEncoderNewline specifies if option NoEncoderNewline opens
+func (self *Encoder) SetNoEncoderNewline(f bool) {
+    if f {
+        self.Opts |= NoEncoderNewline
+    } else {
+        self.Opts &= ^NoEncoderNewline
+    }
+}
+
+
 // SetCompactMarshaler specifies if option CompactMarshaler opens
 func (self *Encoder) SetCompactMarshaler(f bool) {
     if f {
@@ -143,53 +161,45 @@ func (enc *Encoder) SetIndent(prefix, indent string) {
 
 // Quote returns the JSON-quoted version of s.
 func Quote(s string) string {
-    var n int
-    var p []byte
-
-    /* check for empty string */
-    if s == "" {
-        return `""`
-    }
-
-    /* allocate space for result */
-    n = len(s) + 2
-    p = make([]byte, 0, n)
-
-    /* call the encoder */
-    _ = encodeString(&p, s)
-    return rt.Mem2Str(p)
+    buf := make([]byte, 0, len(s)+2)
+    buf = alg.Quote(buf, s, false)
+    return rt.Mem2Str(buf)
 }
 
 // Encode returns the JSON encoding of val, encoded with opts.
 func Encode(val interface{}, opts Options) ([]byte, error) {
     var ret []byte
 
-    buf := newBytes()
-    err := encodeInto(&buf, val, opts)
+    buf := vars.NewBytes()
+    err := encodeIntoCheckRace(buf, val, opts)
 
     /* check for errors */
     if err != nil {
-        freeBytes(buf)
+        vars.FreeBytes(buf)
         return nil, err
     }
 
     /* htmlescape or correct UTF-8 if opts enable */
     old := buf
-    buf = encodeFinish(old, opts)
-    pbuf := ((*rt.GoSlice)(unsafe.Pointer(&buf))).Ptr
-    pold := ((*rt.GoSlice)(unsafe.Pointer(&old))).Ptr
+    *buf = encodeFinish(*old, opts)
+    pbuf := ((*rt.GoSlice)(unsafe.Pointer(buf))).Ptr
+    pold := ((*rt.GoSlice)(unsafe.Pointer(old))).Ptr
 
     /* return when allocated a new buffer */
     if pbuf != pold {
-        freeBytes(old)
-        return buf, nil
+        vars.FreeBytes(old)
+        return *buf, nil
     }
 
     /* make a copy of the result */
-    ret = make([]byte, len(buf))
-    copy(ret, buf)
-
-    freeBytes(buf)
+    if rt.CanSizeResue(cap(*buf)) {
+        ret = make([]byte, len(*buf))
+        copy(ret, *buf)
+        vars.FreeBytes(buf)
+    } else {
+        ret = *buf
+    }
+    /* return the buffer into pool */
     return ret, nil
 }
 
@@ -197,7 +207,7 @@ func Encode(val interface{}, opts Options) ([]byte, error) {
 // EncodeInto is like Encode but uses a user-supplied buffer instead of allocating
 // a new one.
 func EncodeInto(buf *[]byte, val interface{}, opts Options) error {
-    err := encodeInto(buf, val, opts)
+    err := encodeIntoCheckRace(buf, val, opts)
     if err != nil {
         return err
     }
@@ -206,15 +216,15 @@ func EncodeInto(buf *[]byte, val interface{}, opts Options) error {
 }
 
 func encodeInto(buf *[]byte, val interface{}, opts Options) error {
-    stk := newStack()
+    stk := vars.NewStack()
     efv := rt.UnpackEface(val)
     err := encodeTypedPointer(buf, efv.Type, &efv.Value, stk, uint64(opts))
 
     /* return the stack into pool */
     if err != nil {
-        resetStack(stk)
+        vars.ResetStack(stk)
     }
-    freeStack(stk)
+    vars.FreeStack(stk)
 
     /* avoid GC ahead */
     runtime.KeepAlive(buf)
@@ -226,13 +236,12 @@ func encodeFinish(buf []byte, opts Options) []byte {
     if opts & EscapeHTML != 0 {
         buf = HTMLEscape(nil, buf)
     }
-    if opts & ValidateString != 0 && !utf8.Validate(buf) {
+    if (opts & ValidateString != 0) && !utf8.Validate(buf) {
        buf = utf8.CorrectWith(nil, buf, `\ufffd`)
     }
     return buf
 }
 
-var typeByte = rt.UnpackType(reflect.TypeOf(byte(0)))
 
 // HTMLEscape appends to dst the JSON-encoded src with <, >, &, U+2028 and U+2029
 // characters inside string literals changed to \u003c, \u003e, \u0026, \u2028, \u2029
@@ -241,7 +250,7 @@ var typeByte = rt.UnpackType(reflect.TypeOf(byte(0)))
 // escaping within