
osg usage notes (12): SSAO (screen-space ambient occlusion)


I. Basic concepts

1. SSAO (screen-space ambient occlusion) approximates indirect lighting by darkening creases, holes, and surfaces that sit very close to other geometry. It estimates the amount of occlusion from the scene depth available in screen space rather than from the real geometry, which is fast and still looks good.

2. Principle: generate random depth samples inside a hemisphere oriented along the surface normal, and use the depth values around the fragment to decide how strongly it is occluded.

3. Key points:

a. Project the sample points into screen space and fetch the depth texture

b. Sample the depth buffer

c. If the sample position is deeper than the depth stored in the depth texture, the sample is occluded and the occlusion factor increases

The sample density therefore determines the quality of the final occlusion, but too many samples make rendering stall. Randomly rotating the sample kernel per pixel keeps the sample count low while still giving good occlusion; a sketch of the core test is shown below.

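A minimal GLSL sketch of the per-sample test in a-c (the names fragPos, tbn, positionTex, projMatrix, ssaoKernel and ssaoRadius are assumptions here; the complete fragment shader is built in section 6):

// One kernel sample: fragPos is the view-space position of the current pixel,
// tbn rotates the kernel sample onto the surface normal
vec3 samplePos = fragPos + (tbn * ssaoKernel[i]) * ssaoRadius;
vec4 offset = projMatrix * vec4(samplePos, 1.0);       // a. project the sample into clip space
offset.xyz /= offset.w;                                //    perspective divide
offset.xy = offset.xy * 0.5 + 0.5;                     //    map to [0,1] texture coordinates
float sampleDepth = texture(positionTex, offset.xy).z; // b. depth of the surface stored at that pixel
occlusion += (sampleDepth >= samplePos.z ? 1.0 : 0.0); // c. occluded if that surface is in front of the sample
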
4. Steps for normal-oriented hemisphere sampling:

a. Generate random 3D points distributed roughly over a hemisphere oriented toward the +z axis

//Random number generator (xorshift32)
unsigned int xorshift32()
{
	static unsigned int x=1424447641u;
	x^=x<<13;
	x^=x>>17;
	x^=x<<5;
	return x;
}

//Uniform random float in [min, max]
float random(float min,float max)
{
	return min+(max-min)*(static_cast<float>(xorshift32())/static_cast<float>(0xFFFFFFFFu));
}
//Linear interpolation
float lerp(float min,float max,float t)
{
	return min*(1.0f-t)+max*t;
}
//Generate kernelSize sample points inside a hemisphere oriented along +z
osg::Vec3f* generateHemisphereSamples(int kernelSize)
{
	osg::Vec3f* kernel=new osg::Vec3f[kernelSize];
    for (int i = 0; i < kernelSize; ++i) {
        kernel[i]=osg::Vec3f(random(-0.95f,0.95f),random(-0.95f,0.95f),random(0.0f,1.0f));
        kernel[i].normalize();
        kernel[i] *= random(0.0f, 1.0f);
        //Bias the samples toward the centre of the hemisphere
        float scale = float(i) / float(kernelSize);
        scale = lerp(0.1f, 1.0f, scale * scale);
        kernel[i] *= scale;
    }
    return kernel;
}

b. Discretize the distribution so that samples are denser close to the centre of the hemisphere

kernel[i] *= random(0.0f, 1.0f);
float scale = float(i) / float(kernelSize);
scale = lerp(0.1f, 1.0f, scale * scale);
kernel[i] *= scale;

c. Generate the noise texture

//Generate noiseSize random rotation vectors in the xy plane, packed into [0,1]
osg::Vec3f* generateNoise(int noiseSize)
{
	osg::Vec3f* noiseData=new osg::Vec3f[noiseSize];
	for(int i=0;i<noiseSize;++i)
	{
		noiseData[i]=osg::Vec3f(random(-1.0f,1.0f),random(-1.0f,1.0f),0.0f);
		noiseData[i].normalize();
		noiseData[i]=(noiseData[i]+osg::Vec3f(1.0f,1.0f,1.0f))/2.0f;
	}
	return noiseData;
}

osg::ref_ptr<osg::Texture2D> createDataTexture(int width,int height)
{
	osg::ref_ptr<osg::Texture2D> texture=new osg::Texture2D;
	osg::Image* image=new osg::Image;
	auto data=generateNoise(width*height);
	image->setImage(width,height,1,GL_RGB,GL_RGB,GL_FLOAT,(unsigned char*)data,osg::Image::USE_NEW_DELETE);
	texture->setImage(image);
	texture->setWrap(osg::Texture::WRAP_S,osg::Texture::REPEAT);
	texture->setWrap(osg::Texture::WRAP_T,osg::Texture::REPEAT);
	texture->setFilter(osg::Texture::MIN_FILTER,osg::Texture::NEAREST);
	texture->setFilter(osg::Texture::MAG_FILTER,osg::Texture::NEAREST);
	return texture;
}
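
The noise texture only needs to be small because it is tiled across the screen with REPEAT wrapping; for example (matching the noiseSize = 4 used in createSSAOCamera below):

osg::ref_ptr<osg::Texture2D> noiseTex = createDataTexture(4, 4);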

5. Create the deferred rendering (G-buffer) shader

/*Deferred rendering (G-buffer) camera*/
osg::ref_ptr<RttCamera> createDeferCamera(osg::Camera::BufferComponent buffer1,osg::Texture* tex1,
										  osg::Camera::BufferComponent buffer2,osg::Texture* tex2,
                                          osg::Camera::BufferComponent buffer3,osg::Texture* tex3,int width,int height)
{
	osg::ref_ptr<RttCamera> camera=new RttCamera(width,height);
	camera->setRenderTargetImplementation(osg::Camera::RenderTargetImplementation::FRAME_BUFFER_OBJECT);
	camera->setClearMask(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT);
	camera->setPostDrawCallback(new FBOPostDrawCallback);
	camera->setRenderOrder(osg::Camera::PRE_RENDER,20);
	camera->setViewport(0,0,width,height);
	if(tex1)
	{
		tex1->setFilter(osg::Texture::MIN_FILTER,osg::Texture::LINEAR);
		tex1->setFilter(osg::Texture::MAG_FILTER,osg::Texture::LINEAR);
		camera->attach(buffer1,tex1);
	}
	if(tex2)
	{
		tex2->setFilter(osg::Texture::MIN_FILTER,osg::Texture::LINEAR);
		tex2->setFilter(osg::Texture::MAG_FILTER,osg::Texture::LINEAR);
		camera->attach(buffer2,tex2);
	}
	if(tex3)
	{
		tex3->setFilter(osg::Texture::MIN_FILTER,osg::Texture::LINEAR);
		tex3->setFilter(osg::Texture::MAG_FILTER,osg::Texture::LINEAR);
		camera->attach(buffer3,tex3);
	}
	///Vertex shader
	const char* vertCode=R"(
		#version 330
		layout(location = 0) in vec3 Position;
		layout(location = 2) in vec3 normal;
		layout(location = 3) in vec2 TexCoord;
		
		uniform mat4 osg_ModelViewProjectionMatrix;
		uniform mat4 osg_ModelViewMatrix;
		
		out vec3 vNormal;
		out vec2 texCoord;
		out vec4 fragPos;
		void main()
		{
			texCoord=TexCoord;
			fragPos=osg_ModelViewMatrix*vec4(Position,1.0);
			// w=0: transform the normal as a direction with the inverse-transpose of the model-view matrix
			vec4 viewNorm=transpose(inverse(osg_ModelViewMatrix))*vec4(-normal,0.0);
			vNormal=normalize(viewNorm.xyz);
			gl_Position=osg_ModelViewProjectionMatrix*vec4(Position,1.0);
		}
	)";
	
	const char* fragCode=R"(
		#version 330 core
		uniform vec3 frontCol=vec3(1.0,0.0,0.2);
        layout (location = 0) out vec4 gColor;
        layout (location = 1) out vec4 gNormal;
        layout (location = 2) out vec4 gPosition;

        in vec2 texCoord;
        in vec4 fragPos;
        in vec3 vNormal;

        void main()
        {    
            // Store the view-space fragment position in the third G-buffer texture
            gPosition = vec4(fragPos.xyz, 1.0);
            // Store the per-fragment view-space normal in the second G-buffer texture
            gNormal = vec4(vNormal,1.0);          
            gColor = vec4(frontCol,1.0);
        }
	)";
	
	osg::ref_ptr<osg::Shader> vertShader=new osg::Shader(osg::Shader::VERTEX,vertCode);
    osg::ref_ptr<osg::Shader> fragShader=new osg::Shader(osg::Shader::FRAGMENT,fragCode);
    osg::ref_ptr<osg::Program>  program=new osg::Program;
    program->addShader(vertShader);
    program->addShader(fragShader);
    camera->getOrCreateStateSet()->setAttributeAndModes(program,osg::StateAttribute::ON|osg::StateAttribute::OVERRIDE);
    return camera;
}
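
The RttCamera and FBOPostDrawCallback helpers used above are not shown in the original post. A minimal sketch of what they might look like, assuming RttCamera is simply an osg::Camera preconfigured for off-screen rendering at a fixed size and the callback is an empty post-draw hook:

#include <osg/Camera>

// Assumed helper: an osg::Camera preset for render-to-texture at a fixed size.
class RttCamera : public osg::Camera
{
public:
    RttCamera(int width, int height)
    {
        setViewport(0, 0, width, height);
        setRenderTargetImplementation(osg::Camera::FRAME_BUFFER_OBJECT);
    }
};

// Assumed helper: a post-draw hook, left empty here (a real one might read back or resolve the FBO).
struct FBOPostDrawCallback : public osg::Camera::DrawCallback
{
    virtual void operator()(osg::RenderInfo& /*renderInfo*/) const {}
};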

6. Create the SSAO shader. For each kernel sample, read sampleDepth from the position/depth texture. If the stored surface lies in front of the sample position, the sample is "inside" geometry and contributes to occlusion; if it lies behind the sample position, it contributes nothing. The rangeCheck term helps prevent false occlusion across large depth discontinuities.

osg::ref_ptr<RttCamera> createSSAOCamera(osg::Texture* positionTex,osg::Texture* normalTex,osg::Matrix& projMat,osg::Camera::BufferComponent buffer,osg::Texture* tex,int width,int height)
{
	osg::ref_ptr<RttCamera> camera=new RttCamera(width,height);
	camera->setRenderTargetImplementation(osg::Camera::RenderTargetImplementation::FRAME_BUFFER_OBJECT);
	camera->setClearMask(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT);
	camera->setPostDrawCallback(new FBOPostDrawCallback);
	camera->setRenderOrder(osg::Camera::PRE_RENDER,20);
	camera->setViewport(0,0,width,height);
	if(tex)
	{
		tex->setFilter(osg::Texture::MIN_FILTER,osg::Texture::LINEAR);
		tex->setFilter(osg::Texture::MAG_FILTER,osg::Texture::LINEAR);
		
		camera->setViewport(0,0,tex->getTextureWidth(),tex->getTextureHeight());
		camera->attach(buffer,tex);
	}
	
	int noiseSize=4,kernelSize=8;
	float radius=5.f,power=3.f;
	///Create the camera's StateSet
	auto ss=camera->getOrCreateStateSet();
	ss->addUniform(new osg::Uniform("noiseTex",0));
	ss->setTextureAttributeAndModes(0,createDataTexture(noiseSize,noiseSize));
	ss->addUniform(new osg::Uniform("positionTex",1));
	ss->setTextureAttributeAndModes(1,positionTex);
	ss->addUniform(new osg::Uniform("normalTex",2));
	ss->setTextureAttributeAndModes(2,normalTex);
	
	ss->addUniform(new osg::Uniform("ssaoRadius",radius));
	ss->addUniform(new osg::Uniform("ssaoPower",power));
	ss->addUniform(new osg::Uniform("kernelSize",kernelSize*kernelSize));
	ss->addUniform(new osg::Uniform("noiseTextureRcp",osg::Vec2(width/float(noiseSize),height/float(noiseSize))));
	ss->addUniform(new osg::Uniform("projMatrix",(osg::Matrixf)projMat));
	///Create the random hemisphere sample kernel and upload it as a uniform array
	auto kernelUniform=new osg::Uniform(osg::Uniform::FLOAT_VEC3,"ssaoKernel",kernelSize*kernelSize);
	auto kernelData=generateHemisphereSamples(kernelSize*kernelSize);
	for(int i=0;i<kernelSize*kernelSize;i++)
	{
		kernelUniform->setElement(i,kernelData[i]);
	}
	ss->addUniform(kernelUniform);
	delete[] kernelData;
    ///Vertex shader
	const char* vertCode=R"(
		#version 330
		layout(location = 0) in vec3 Position;
		layout(location = 3) in vec2 TexCoord;
		
		uniform mat4 osg_ModelViewProjectionMatrix;
		uniform mat4 osg_ModelViewMatrix;
		uniform mat4 osg_NormalMatrix;
		
		out vec2 texCoord;
		void main()
		{
			texCoord=TexCoord;
			gl_Position=osg_ModelViewProjectionMatrix*vec4(Position,1.0);
		}
	)";
	///Fragment shader
	const char* fragCode=R"(
		#version 330 core
		uniform sampler2D positionTex;
		uniform sampler2D normalTex;
		uniform sampler2D noiseTex;
		
		const int MAX_KERNEL_SIZE=128;
		uniform vec3 ssaoKernel[MAX_KERNEL_SIZE];
        
		uniform mat4 projMatrix;
		uniform vec2 noiseTextureRcp;
		uniform int kernelSize;
		uniform float ssaoRadius;
		uniform float ssaoPower;
		const float bias=0.0;
		
		in vec2 texCoord;
		out vec4 fragColor;
		
        void main()
        {    
            // View-space position of the current pixel
            vec3 fragPos=texture(positionTex,texCoord).xyz;
            // View-space normal of the current pixel
            vec3 normal=normalize(texture(normalTex,texCoord).xyz);
            // Random rotation vector from the tiled noise texture
            vec3 rvec=texture(noiseTex,texCoord*noiseTextureRcp).xyz;
            // Tangent-space to view-space rotation matrix (Gram-Schmidt)
            vec3 tangent=normalize(rvec-dot(rvec,normal)*normal);
            vec3 bitangent=cross(tangent,normal);
            mat3 tbn=mat3(tangent,bitangent,normal);
            
            float occlusion=0.0;
            for(int i=0;i<kernelSize;++i)
            {
            	// Sample position in view space
            	vec3 samplePos=fragPos+(tbn*ssaoKernel[i])*ssaoRadius;
            	// Project the sample position to texture coordinates
            	vec4 offset=projMatrix*vec4(samplePos,1.0);
            	offset.xyz/=offset.w;
            	offset.xyz=offset.xyz*0.5+0.5;
            	// Depth of the surface actually stored at that pixel
            	float sampleDepth=texture(positionTex,offset.xy).z;
            	float dist=abs(fragPos.z-sampleDepth);
            	float rangeCheck=smoothstep(0.0,1.0,ssaoRadius/max(dist,0.0001));
            	occlusion+=rangeCheck*(sampleDepth>=samplePos.z+bias?1.0:0.0);
            }
            occlusion=1.0-(occlusion/float(kernelSize));
            // ssaoPower controls the strength of the darkening
            occlusion=pow(occlusion,ssaoPower);
            fragColor=vec4(vec3(occlusion),1.0);
        }
	)";
	///Full-screen quad vertices
	osg::ref_ptr<osg::Vec3Array> vertices= new osg::Vec3Array;
	vertices->push_back(osg::Vec3(-width,-height,0.f));
	vertices->push_back(osg::Vec3(width,-height,0.f));
	vertices->push_back(osg::Vec3(width,height,0.f));
	vertices->push_back(osg::Vec3(-width,height,0.f));
	///Quad normal
	osg::ref_ptr<osg::Vec3Array> normals= new osg::Vec3Array;
	normals->push_back(osg::Vec3(0.0,0.0,1.f));
	///Quad texture coordinates
	osg::ref_ptr<osg::Vec2Array> texCoords= new osg::Vec2Array;
	texCoords->push_back(osg::Vec2(0.0,0.f));
	texCoords->push_back(osg::Vec2(1.0,0.f));
	texCoords->push_back(osg::Vec2(1.0,1.f));
	texCoords->push_back(osg::Vec2(0.0,1.f));
	///Quad geometry
	osg::ref_ptr<osg::Geometry> quad= new osg::Geometry;
	quad->setVertexArray(vertices);
	quad->setNormalArray(normals,osg::Array::BIND_OVERALL);
	quad->setTexCoordArray(0,texCoords);
	quad->addPrimitiveSet(new osg::DrawArrays(GL_QUADS,0,4));
	///Quad node
	osg::ref_ptr<osg::Geode> quadGeode=new osg::Geode;
	quadGeode->addDrawable(quad);
	osg::ref_ptr<osg::Shader> vertShader=new osg::Shader(osg::Shader::VERTEX,vertCode);
    osg::ref_ptr<osg::Shader> fragShader=new osg::Shader(osg::Shader::FRAGMENT,fragCode);
    osg::ref_ptr<osg::Program>  program=new osg::Program;
    program->addShader(vertShader);
    program->addShader(fragShader);
    quadGeode->getOrCreateStateSet()->setAttributeAndModes(program,osg::StateAttribute::ON|osg::StateAttribute::OVERRIDE);
    
    camera->addChild(quadGeode);
    camera->setReferenceFrame(osg::Transform::ABSOLUTE_RF);
    camera->setViewMatrix(osg::Matrix::identity());
    camera->setProjectionMatrix(osg::Matrix::ortho2D(-width,width,-height,height));    
	return camera;
}	

7. Create the blur shader, which averages each pixel over a 4x4 neighbourhood to remove the noise pattern introduced by the rotated kernel.

osg::ref_ptr<RttCamera> createBlurCamera(osg::Texture* colorTex,osg::Texture* ssaoTex,osg::Texture* normalTex,int width,int height)
{
	osg::ref_ptr<RttCamera> camera=new RttCamera(width,height);
	camera->setRenderTargetImplementation(osg::Camera::RenderTargetImplementation::FRAME_BUFFER_OBJECT);
	camera->setClearMask(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT);
	camera->setPostDrawCallback(new FBOPostDrawCallback);
	camera->setRenderOrder(osg::Camera::POST_RENDER,100);
	camera->setViewport(0,0,width,height);

	
	///Create the camera's StateSet
	auto ss=camera->getOrCreateStateSet();
	ss->addUniform(new osg::Uniform("colorTex",0));
	ss->setTextureAttributeAndModes(0,colorTex);
	ss->addUniform(new osg::Uniform("ssaoTex",1));
	ss->setTextureAttributeAndModes(1,ssaoTex);
	ss->addUniform(new osg::Uniform("normalTex",2));
	ss->setTextureAttributeAndModes(2,normalTex);
	
    ///Vertex shader
	const char* vertCode=R"(
		#version 330
		layout(location = 0) in vec3 Position;
		layout(location = 3) in vec2 TexCoord;
		
		uniform mat4 osg_ModelViewProjectionMatrix;
		uniform mat4 osg_ModelViewMatrix;
		uniform mat4 osg_NormalMatrix;
		
		out vec2 texCoord;
		void main()
		{
			texCoord=TexCoord;
			gl_Position=osg_ModelViewProjectionMatrix*vec4(Position,1.0);
		}
	)";
	///Fragment shader
	const char* fragCode=R"(
		#version 330 core
		uniform sampler2D colorTex;
		uniform sampler2D ssaoTex;
		uniform sampler2D normalTex;
		
		const int blurSize=4;
		uniform mat4 osg_ModelViewMatrix;
		uniform vec3 lightDir=vec3(0.0,0.0,1.0); // view-space light direction (assumed head light)
		uniform float ambFactor=0.5;
		uniform float diffFactor=0.8;
		uniform float specFactor=0.15;
		uniform float shininess=128.0;
		
		vec3 calcDirLight(vec3 color,vec3 normal,float ambient,float diffuse,float specular,float strength)
		{
			float diff=max(0.0,dot(normal,lightDir));
			float spec=pow(diff,strength);
			return color*(ambient+diff*diffuse)+spec*specular;
		}
		in vec2 texCoord;
		out vec4 fragColor;
        void main()
        {    
            // Average the SSAO term over a blurSize x blurSize neighbourhood
            vec2 texelSize = 1.0 / vec2(textureSize(ssaoTex, 0));
   			float result = 0.0;
   			for (int i = 0; i < blurSize; ++i) 
   			{
      			for (int j = 0; j < blurSize; ++j)
                {
         			vec2 offset = (vec2(-2.0) + vec2(float(i), float(j))) * texelSize;
         			result += texture(ssaoTex, texCoord + offset).r;
      			}
   			}
 
   			result = result / float(blurSize * blurSize);
   			vec3 color=texture(colorTex,texCoord).rgb;
   			vec3 normal=texture(normalTex,texCoord).rgb;
   			vec4 viewNorm=osg_ModelViewMatrix*vec4(-normal,0.0);
   			normal=normalize(viewNorm.xyz);
            // Apply the blurred occlusion to the ambient term of a simple directional light
            vec3 lightCol=calcDirLight(color,normal,ambFactor*result,diffFactor,specFactor,shininess);
            lightCol=mix(lightCol,vec3(1.0),step(normal.z,0.0));
            fragColor=vec4(lightCol,1.0);
        }
	)";
	///Full-screen quad vertices
	osg::ref_ptr<osg::Vec3Array> vertices= new osg::Vec3Array;
	vertices->push_back(osg::Vec3(-width,-height,0.f));
	vertices->push_back(osg::Vec3(width,-height,0.f));
	vertices->push_back(osg::Vec3(width,height,0.f));
	vertices->push_back(osg::Vec3(-width,height,0.f));
	///Quad normal
	osg::ref_ptr<osg::Vec3Array> normals= new osg::Vec3Array;
	normals->push_back(osg::Vec3(0.0,0.0,1.f));
	///Quad texture coordinates
	osg::ref_ptr<osg::Vec2Array> texCoords= new osg::Vec2Array;
	texCoords->push_back(osg::Vec2(0.0,0.f));
	texCoords->push_back(osg::Vec2(1.0,0.f));
	texCoords->push_back(osg::Vec2(1.0,1.f));
	texCoords->push_back(osg::Vec2(0.0,1.f));
	///Quad geometry
	osg::ref_ptr<osg::Geometry> quad= new osg::Geometry;
	quad->setVertexArray(vertices);
	quad->setNormalArray(normals,osg::Array::BIND_OVERALL);
	quad->setTexCoordArray(0,texCoords);
	quad->addPrimitiveSet(new osg::DrawArrays(GL_QUADS,0,4));
	///Quad node
	osg::ref_ptr<osg::Geode> quadGeode=new osg::Geode;
	quadGeode->addDrawable(quad);
	osg::ref_ptr<osg::Shader> vertShader=new osg::Shader(osg::Shader::VERTEX,vertCode);
    osg::ref_ptr<osg::Shader> fragShader=new osg::Shader(osg::Shader::FRAGMENT,fragCode);
    osg::ref_ptr<osg::Program>  program=new osg::Program;
    program->addShader(vertShader);
    program->addShader(fragShader);
    quadGeode->getOrCreateStateSet()->setAttributeAndModes(program,osg::StateAttribute::ON|osg::StateAttribute::OVERRIDE);
    
    camera->addChild(quadGeode);
    camera->setReferenceFrame(osg::Transform::ABSOLUTE_RF);
    camera->setViewMatrix(osg::Matrix::identity());
    camera->setProjectionMatrix(osg::Matrix::ortho2D(-width,width,-height,height));    
	return camera;
}	

8. Attach the G-buffer camera, the SSAO camera and the blur camera to the root node

///First create the deferred (G-buffer) pass, which outputs the position, color and normal textures
auto positionTex=createColorTexture(width,height);
auto colorTex=createColorTexture(width,height);
auto normalTex=createColorTexture(width,height); 
auto pass1=createDeferCamera(osg::Camera::COLOR_BUFFER2,positionTex, osg::Camera::COLOR_BUFFER0,colorTex,osg::Camera::COLOR_BUFFER1,normalTex,width,height);
pass1->setRenderOrder(osg::Camera::PRE_RENDER,20);
pass1->setClearColor(osg::Vec4(1.0,1.0,1.0,1.0));
pass1->addChild(model);
///Then create the SSAO pass, which outputs the SSAO texture (projMat is the main camera's projection matrix)
auto ssaoTex=createColorTexture(width,height);
auto pass2=createSSAOCamera(positionTex,normalTex,projMat,osg::Camera::COLOR_BUFFER,ssaoTex,width,height);
pass2->setRenderOrder(osg::Camera::PRE_RENDER,100);
pass2->setClearColor(osg::Vec4(1.0,1.0,1.0,1.0));
///Finally create the blur pass
auto pass3 =createBlurCamera(colorTex,ssaoTex,normalTex,width,height);
pass3->setRenderOrder(osg::Camera::POST_RENDER,300);
pass3->setClearColor(osg::Vec4(1.0,1.0,1.0,1.0));
root->addChild(pass1);
root->addChild(pass2);
root->addChild(pass3);  
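
The createColorTexture helper is also not shown in the original post. A minimal sketch, assuming a floating-point RGBA render target so that view-space positions and normals are stored without clamping:

#include <osg/Texture2D>

// Assumed helper: create an empty float texture to attach as a render target.
osg::ref_ptr<osg::Texture2D> createColorTexture(int width, int height)
{
    osg::ref_ptr<osg::Texture2D> texture = new osg::Texture2D;
    texture->setTextureSize(width, height);
    texture->setInternalFormat(GL_RGBA16F_ARB);
    texture->setSourceFormat(GL_RGBA);
    texture->setSourceType(GL_FLOAT);
    texture->setFilter(osg::Texture::MIN_FILTER, osg::Texture::LINEAR);
    texture->setFilter(osg::Texture::MAG_FILTER, osg::Texture::LINEAR);
    texture->setWrap(osg::Texture::WRAP_S, osg::Texture::CLAMP_TO_EDGE);
    texture->setWrap(osg::Texture::WRAP_T, osg::Texture::CLAMP_TO_EDGE);
    return texture;
}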

From: https://www.cnblogs.com/wangxydela/p/18284632
