Lego Cleanup

Unity + AI

Exploring VR Interaction through AI-Powered Prototyping

When OpenAI released GPT-4 in 2023, I began exploring how AI could help me, someone with zero coding experience, write Unity scripts. This quick project demonstrates how AI-powered assistance enabled me to rapidly prototype novel XR interactions.

Using Unity and the Oculus Integration package, I built a Black Hole mechanic that lets users collect scattered LEGO bricks through hand gestures.

Thumb Gesture Activation (BlackHoleActivator)

One of the key features of this exploration is the thumb gesture activation. Pressing the thumb against the palm, with the palm facing the user, makes the BlackHole appear. The gesture was intentionally designed to avoid false activations while handling LEGO pieces: it only triggers when the palm is facing the camera, making it a reliable and discreet activation method that fits naturally with the rest of the interaction.

using UnityEngine;

public class BlackHoleActivator : MonoBehaviour
{
    public OVRHand leftHand;  // Reference to the left hand
    public GameObject blackHole;  // Reference to the BlackHole object
    public Collider palmCollider;  // Collider on the palm
    public float scaleDuration = 0.5f;  // Duration for scale effect, adjustable in the inspector

    private bool blackHoleActive = false;
    private bool isTouching = false;
    private ScaleEffect scaleEffect;

    void Start()
    {
        scaleEffect = blackHole.GetComponent<ScaleEffect>();
        scaleEffect.scaleDuration = scaleDuration;  // Set the scale duration
        blackHole.transform.localScale = Vector3.zero;  // Start with zero scale
        blackHole.SetActive(false);  // Ensure BlackHole is initially inactive
    }

    void Update()
    {
        if (leftHand.IsTracked && IsThumbTouchingPalm())
        {
            if (!isTouching)
            {
                ToggleBlackHole();
                isTouching = true;
            }
        }
        else
        {
            isTouching = false;
        }
    }

    private bool IsThumbTouchingPalm()
    {
        Collider[] hitColliders = Physics.OverlapBox(palmCollider.bounds.center, palmCollider.bounds.extents, palmCollider.transform.rotation);
        foreach (var hitCollider in hitColliders)
        {
            if (hitCollider.CompareTag("Thumb"))
            {
                return true;
            }
        }
        return false;
    }

    private void ToggleBlackHole()
    {
        blackHoleActive = !blackHoleActive;
        if (blackHoleActive)
        {
            blackHole.SetActive(true);
            scaleEffect.ScaleIn();
        }
        else
        {
            scaleEffect.ScaleOut(() => blackHole.SetActive(false));
        }
        Debug.Log("BlackHole toggled: " + blackHoleActive);

        // Disable or enable gravity for all objects with a Rigidbody in the scene
        Rigidbody[] rigidbodies = FindObjectsOfType<Rigidbody>();
        foreach (var rb in rigidbodies)
        {
            rb.useGravity = !blackHoleActive;
        }

        // Toggle the BlackHole script
        blackHole.GetComponent<BlackHole>().enabled = blackHoleActive;
    }
}

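The activator relies on a ScaleEffect component for the grow and shrink animation, which isn't included in this post. Below is a minimal sketch of what it could look like, assuming it simply lerps the object's localScale over scaleDuration and fires an optional callback when the shrink finishes; this is my reconstruction, not the original script.

using System;
using System.Collections;
using UnityEngine;

public class ScaleEffect : MonoBehaviour
{
    public float scaleDuration = 0.5f;        // Overridden by BlackHoleActivator in Start()
    public Vector3 targetScale = Vector3.one; // Full size of the BlackHole

    // Grow from the current scale up to targetScale
    public void ScaleIn()
    {
        StopAllCoroutines();
        StartCoroutine(ScaleRoutine(transform.localScale, targetScale, null));
    }

    // Shrink down to zero, then invoke the callback (used to deactivate the BlackHole)
    public void ScaleOut(Action onComplete = null)
    {
        StopAllCoroutines();
        StartCoroutine(ScaleRoutine(transform.localScale, Vector3.zero, onComplete));
    }

    private IEnumerator ScaleRoutine(Vector3 from, Vector3 to, Action onComplete)
    {
        float elapsed = 0f;
        while (elapsed < scaleDuration)
        {
            elapsed += Time.deltaTime;
            transform.localScale = Vector3.Lerp(from, to, elapsed / scaleDuration);
            yield return null;
        }
        transform.localScale = to;
        onComplete?.Invoke();
    }
}

Since BlackHoleActivator assigns scaleDuration in Start(), the value exposed on the activator is the one that actually controls the animation speed.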

Hand-Controlled Movement (BlackHole Positioning)

The middle-finger pinch gesture moves the BlackHole without relying on far-field rays. It's designed to avoid conflict with the standard index-finger pinch used for grabbing bricks, ensuring smooth control.

using UnityEngine;
using Oculus.Interaction;

public class HandControlledMovement : MonoBehaviour
{
    public OVRHand leftHand;
    public OVRHand rightHand;
    public Transform blackHole;
    private Vector3 initialOffsetLeft;
    private Vector3 initialOffsetRight;
    private bool isControllingObjectLeft = false;
    private bool isControllingObjectRight = false;

    void Update()
    {
        if (leftHand.IsTracked && IsPinchingGesture(leftHand) && IsHandFacing(leftHand, blackHole))
        {
            if (!isControllingObjectLeft)
            {
                initialOffsetLeft = blackHole.position - leftHand.PointerPose.position;
                isControllingObjectLeft = true;
            }
            blackHole.position = leftHand.PointerPose.position + initialOffsetLeft;
        }
        else
        {
            isControllingObjectLeft = false;
        }

        if (rightHand.IsTracked && IsPinchingGesture(rightHand) && IsHandFacing(rightHand, blackHole))
        {
            if (!isControllingObjectRight)
            {
                initialOffsetRight = blackHole.position - rightHand.PointerPose.position;
                isControllingObjectRight = true;
            }
            blackHole.position = rightHand.PointerPose.position + initialOffsetRight;
        }
        else
        {
            isControllingObjectRight = false;
        }
    }

    private bool IsPinchingGesture(OVRHand hand)
    {
        return hand.GetFingerIsPinching(OVRHand.HandFinger.Middle) && hand.GetFingerIsPinching(OVRHand.HandFinger.Thumb);
    }

    private bool IsHandFacing(OVRHand hand, Transform target)
    {
        Vector3 handForward = hand.PointerPose.forward;
        Vector3 toTarget = (target.position - hand.PointerPose.position).normalized;
        float dotProduct = Vector3.Dot(handForward, toTarget);
        // Treat the hand as "facing" the BlackHole when it points within roughly 45 degrees of it
        return dotProduct > 0.7f;
    }
}

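The BlackHole script that actually collects the bricks is toggled by BlackHoleActivator but isn't shown in this post. As a rough idea of the mechanic described above, here is a minimal sketch that pulls nearby Rigidbodies toward the centre and despawns them when they arrive; the force values and the "Lego" tag are assumptions, not the original implementation.

using UnityEngine;

public class BlackHole : MonoBehaviour
{
    public float pullRadius = 2f;         // How far the attraction reaches
    public float pullForce = 10f;         // Strength of the pull
    public float collectDistance = 0.1f;  // Bricks closer than this are collected

    void FixedUpdate()
    {
        // Find every collider inside the pull radius
        Collider[] hits = Physics.OverlapSphere(transform.position, pullRadius);
        foreach (var hit in hits)
        {
            // Only affect LEGO bricks (hypothetical tag) that have a Rigidbody
            Rigidbody rb = hit.attachedRigidbody;
            if (rb == null || !hit.CompareTag("Lego")) continue;

            Vector3 toCenter = transform.position - rb.position;

            // Despawn bricks that reach the centre of the black hole
            if (toCenter.magnitude < collectDistance)
            {
                Destroy(rb.gameObject);
                continue;
            }

            // Accelerate the brick toward the black hole
            rb.AddForce(toCenter.normalized * pullForce, ForceMode.Acceleration);
        }
    }
}

Because BlackHoleActivator turns off gravity on every Rigidbody while the BlackHole is active, the bricks drift toward it instead of dropping back to the floor.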

Thanks, ChatGPT 😊
