- 基于虚幻引擎5.0.3,提供运行时的C++接口。该系列接口实现了虚拟人的加载、表情、动画、换装、捏脸、AI问答等功能。
- Visual Studio 2022/JetBrains Rider 2023
- Windows SDK 10.0.22621.0
- MSVC v143 - VS 2022 C++ x64/x86 build tools 14.35.32215 (不要使用MSVC v143 14.39)
- 虚幻引擎(Unreal Engine)5.0.3
- 获取Demo
git clone git@github.com:WeHome007/NextCAS-UE.git - 打包项目(!!!由于所有模型资产都打包在pak文件中,目前只能打包后运行才能正常加载!!!)
- 启动项目:
Demo.exe -at="AccessToken" [-aid="AvatarId"] [-q="Question"]
-at: 使用“获取鉴权令牌”步骤中生成的令牌
-aid: 形象ID。参数不输入则使用默认值,具体看Demo代码。
-q:Demo启动后,向虚拟人提出的问题。
; Renderer settings required by the SDK's skeletal-mesh avatars (DefaultEngine.ini).
[/Script/Engine.RendererSettings]
; GPU skinning: enable 16-bit bone indices and remove the bone-influence limit,
; presumably because the avatar meshes exceed the default limits — confirm with SDK docs.
r.GPUSkin.Support16BitBoneIndex=True
r.GPUSkin.UnlimitedBoneInfluences=True
; Skin-cache settings (shader compilation enabled; default behavior disabled per-mesh).
r.SkinCache.BlendUsingVertexColorForRecomputeTangents=2
r.SkinCache.CompileShaders=True
r.SkinCache.DefaultBehavior=0
SkeletalMesh.UseExperimentalChunking=1
; Post-processing / back-buffer format tweaks.
r.PostProcessing.PropagateAlpha=2
r.DefaultBackBufferPixelFormat=4
; Texture-streaming pool size in MB.
r.Streaming.PoolSize=5000
[ConsoleVariables]
fx.Niagara.ForceLastTickGroup=1
r.streaming.MaxTempMemoryAllowed=100
; Packaging: pak files only — IoStore/Zen disabled and shader-code sharing off,
; which matches the note above that assets only load correctly from a packaged build.
[/Script/UnrealEd.ProjectPackagingSettings]
UsePakFile=True
bUseIoStore=False
bUseZenStore=False
bShareMaterialShaderCode=False
bSharedMaterialNativeLibraries=False
"Plugins": [
{
"Name": "NextCAS-SDK",
"Enabled": true
}
]
// Add the SDK modules to your project's *.Build.cs dependency list.
PrivateDependencyModuleNames.AddRange(new string[] {
"NextHumanSDK", // Virtual human (avatar) features
"NextAgent" // AI question answering
});
#include "INextHumanSDK.h"
#include "NHError.h"
// Initialize the SDK with the access token generated in the "get access token" step.
// The async callback reports the result: Code == FNHError::SUCCESS on success,
// otherwise Message carries the error details.
INextHumanSDKModule::Get().Initialize(AccessToken, [=](int32 Code, const FString& Message) {
if (Code == FNHError::SUCCESS) {
// Initialization succeeded
} else {
// Initialization failed; Message contains the error information
}
});
#include "NextAvatar.h"
// Spawn an avatar actor at the origin, then load the avatar identified by AvatarId.
// The callback receives the load result plus a map of bundle info — presumably keyed
// by bundle id; confirm the key semantics against the SDK documentation.
ANextAvatar* Avatar = World->SpawnActor<ANextAvatar>(FVector(0, 0, 0), FRotator(0, 0, 0));
Avatar->SetAvatarId(AvatarId, [=](int32 Code, const FString& Message, TMap<FString, ANextAvatar::FBundleInfo> BundleInfos) {
});
#include "NHAgentComponent.h"
// Find the AI-agent component on the avatar, creating and attaching it lazily
// if it does not exist yet, then submit a question.
UNHAgentComponent* Agent = Cast<UNHAgentComponent>(Avatar->GetComponentByClass(UNHAgentComponent::StaticClass()));
if (!Agent) {
Agent = NewObject<UNHAgentComponent>(Avatar);
// NOTE(review): the "CtrlFBF" tag looks SDK-mandated — confirm its meaning with the SDK docs.
Agent->ComponentTags.Add(TEXT("CtrlFBF"));
Agent->RegisterComponent();
Agent->AttachToComponent(Avatar->GetRootComponent(), FAttachmentTransformRules::KeepRelativeTransform);
// Answers arrive asynchronously; Text is the reply to the question asked below.
Agent->OnAnswer().BindLambda([=](nexthuman::sdk::FNHError Result, const FString& Text) {
});
}
Agent->Ask(Question);
// Add a clothing/prop bundle by id. On success the callback's Index identifies the
// added bundle and can later be passed to RemoveBundle to take it off.
Avatar->AddBundleById(TEXT("xxxx"), [=](int32 Code, const FString& Message, int64 Index) {
if (Code == FNHError::SUCCESS) { // Added successfully
// Remove using the index returned by the add call
Avatar->RemoveBundle(Index);
}
});
// Alternatively, SetAvatarId reports every bundle it loaded; each entry's Index
// works with RemoveBundle the same way.
Avatar->SetAvatarId(AvatarId, [=](int32 Code, const FString& Message, TMap<FString, ANextAvatar::FBundleInfo> BundleInfos) {
for (auto& BundleInfo : BundleInfos) {
// Remove using the index returned by SetAvatarId
Avatar->RemoveBundle(BundleInfo.Value.Index);
}
});
测试中
// Morph (face-sculpting) category identifiers accepted by ANextAvatar::ChangeMorph.
// Each category maps to a payload header (see the table below), which presumably
// lists the valid keys for that category — confirm against the SDK headers.
static const FString CATEGORY_MORPH_HEAD = TEXT("headshape");
static const FString CATEGORY_MORPH_FACE = TEXT("faceshape");
static const FString CATEGORY_MORPH_EYES = TEXT("eyeshape");
static const FString CATEGORY_MORPH_EARS = TEXT("earshape");
static const FString CATEGORY_MORPH_NOSE = TEXT("noseshape");
static const FString CATEGORY_MORPH_MOUTH = TEXT("mouthshape");
static const FString CATEGORY_MORPH_TEETH = TEXT("teethshape");
TEXT("headshape") NH01HeadMorphPayload.h
TEXT("faceshape") NH01FaceMorphPayload.h
TEXT("eyeshape") NH01EyesMorphPayload.h
TEXT("earshape") NH01EarsMorphPayload.h
TEXT("noseshape") NH01NoseMorphPayload.h
TEXT("mouthshape") NH01MouthMorphPayload.h
TEXT("teethshape") NH01TeethMorphPayload.h
Avatar->ChangeMorph(CATEGORY_MORPH_FACE, "width", 1.0f);
测试中
nexthuman.sdk.test -test=Avatar -action=destroy -avatarindex=0
avatarindex: 按照添加顺序获得的索引值
nexthuman.sdk.test -test=Avatar -action=create -id=avatar_205547 -x=0 -y=50 -z=0 -pitch=45 -roll=45 -yaw=45
id:形象id
x,y,z:位置
pitch,roll,yaw:旋转
nexthuman.sdk.test -test=Avatar -action=addbundle -avatarindex=0 -bundleid=hat_6257c5387c8c5f5a0aef2d12
bundleid:服装/道具的id
nexthuman.sdk.test -test=Avatar -action=removebundle -avatarindex=0 -bundleindex=5
bundleindex: 添加时返回的索引值
nexthuman.sdk.test -test=Avatar -action=changemorph -avatarindex=0 -category=faceshape -key=width -value=1.0
category:分类
key:值名称
value:值
该仓库为NextHuman对外提供的UE形式的超写实/卡通数字人集成入口,如果需要其他引擎或数字人类型的集成,可根据需求前往以下不同入口: